Commit 93c9d7f6 authored by Linus Torvalds

Merge branch 'x86-atomic-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip

* 'x86-atomic-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/linux-2.6-tip:
  x86: Fix LOCK_PREFIX_HERE for uniprocessor build
  x86, atomic64: In selftest, distinguish x86-64 from 586+
  x86-32: Fix atomic64_inc_not_zero return value convention
  lib: Fix atomic64_inc_not_zero test
  lib: Fix atomic64_add_unless return value convention
  x86-32: Fix atomic64_add_unless return value convention
  lib: Fix atomic64_add_unless test
  x86: Implement atomic[64]_dec_if_positive()
  lib: Only test atomic64_dec_if_positive on archs having it
  x86-32: Rewrite 32-bit atomic64 functions in assembly
  lib: Add self-test for atomic64_t
  x86-32: Allow UP/SMP lock replacement in cmpxchg64
  x86: Add support for lock prefix in alternatives
parents 7421a10d d9c5841e
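
Taken together, the branch replaces the old out-of-line 32-bit atomic64 helpers with assembly routines dispatched via the alternatives mechanism, fixes the return-value conventions of atomic64_add_unless() and atomic64_inc_not_zero(), and adds atomic[64]_dec_if_positive(). As a rough illustration of the dec-if-positive semantics implemented in the hunks below, here is a minimal user-space sketch using C11 atomics rather than the kernel API (the function name is illustrative):

#include <stdatomic.h>
#include <stdio.h>

/* Sketch of the dec-if-positive contract: returns the old value minus 1
 * even when no decrement happened, matching the kernel-doc below. */
static long long dec_if_positive(_Atomic long long *v)
{
	long long c = atomic_load(v), dec;

	for (;;) {
		dec = c - 1;
		if (dec < 0)
			break;		/* would go negative: leave *v alone */
		if (atomic_compare_exchange_weak(v, &c, dec))
			break;		/* won the race: *v is now dec */
		/* c was reloaded by the failed compare-exchange; retry */
	}
	return dec;
}

int main(void)
{
	_Atomic long long v = 1;

	printf("%lld\n", dec_if_positive(&v));	/* 0: decremented 1 -> 0 */
	printf("%lld\n", dec_if_positive(&v));	/* -1: left at 0 */
	return 0;
}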
arch/x86/include/asm/alternative.h  +6 −3
@@ -28,14 +28,17 @@
  */
 
 #ifdef CONFIG_SMP
-#define LOCK_PREFIX \
+#define LOCK_PREFIX_HERE \
 		".section .smp_locks,\"a\"\n"	\
 		".balign 4\n"			\
-		".long 661f - .\n" /* offset */	\
+		".long 671f - .\n" /* offset */	\
 		".previous\n"			\
-		"661:\n\tlock; "
+		"671:"
 
+#define LOCK_PREFIX LOCK_PREFIX_HERE "\n\tlock; "
+
 #else /* ! CONFIG_SMP */
+#define LOCK_PREFIX_HERE ""
 #define LOCK_PREFIX ""
 #endif
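
The split matters because LOCK_PREFIX_HERE records, in the .smp_locks section, a PC-relative pointer to the instruction that follows, so the SMP-alternatives code can later locate every lock prefix and patch it to a NOP on uniprocessor kernels (and back). A minimal user-space sketch of that bookkeeping trick, assuming a GNU toolchain; the section name my_locks and the label are illustrative, not the kernel's:

#include <stdio.h>

/* Emit a PC-relative pointer to the next instruction into a side
 * section, the same trick LOCK_PREFIX_HERE plays with .smp_locks. */
#define ANNOTATE_HERE				\
	".pushsection my_locks,\"a\"\n"		\
	".balign 4\n"				\
	".long 671f - .\n" /* offset */		\
	".popsection\n"				\
	"671:"

/* The linker auto-generates these bounds for C-identifier-named sections. */
extern const int __start_my_locks[], __stop_my_locks[];

int main(void)
{
	asm volatile(ANNOTATE_HERE "\n\tnop");

	/* Each entry is a 32-bit offset from the entry to the annotated insn. */
	for (const int *p = __start_my_locks; p < __stop_my_locks; p++)
		printf("annotated instruction at %p\n", (const char *)p + *p);
	return 0;
}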

arch/x86/include/asm/atomic.h  +23 −0
@@ -246,6 +246,29 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+/*
+ * atomic_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline int atomic_dec_if_positive(atomic_t *v)
+{
+	int c, old, dec;
+	c = atomic_read(v);
+	for (;;) {
+		dec = c - 1;
+		if (unlikely(dec < 0))
+			break;
+		old = atomic_cmpxchg((v), c, dec);
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return dec;
+}
+
 /**
  * atomic_inc_short - increment of a short integer
  * @v: pointer to type int
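
The cmpxchg() loop above is the standard lock-free read-modify-write idiom: snapshot the counter, compute the new value, and retry if another CPU modified it in between. A typical consumer of this pattern, sketched in user-space C11 with illustrative names rather than the kernel API, is a try-acquire on a permit count that must never drop below zero:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static bool try_acquire(_Atomic int *permits)
{
	int c = atomic_load(permits);

	while (c > 0) {
		/* On failure, c is reloaded with the current value. */
		if (atomic_compare_exchange_weak(permits, &c, c - 1))
			return true;
	}
	return false;	/* counter hit 0: refuse to go negative */
}

int main(void)
{
	_Atomic int permits = 2;

	printf("%d %d %d\n", try_acquire(&permits),
	       try_acquire(&permits), try_acquire(&permits));	/* 1 1 0 */
	return 0;
}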
arch/x86/include/asm/atomic64_32.h  +211 −67
@@ -14,109 +14,193 @@ typedef struct {
 
 #define ATOMIC64_INIT(val)	{ (val) }
 
-extern u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old_val, u64 new_val);
+#ifdef CONFIG_X86_CMPXCHG64
+#define ATOMIC64_ALTERNATIVE_(f, g) "call atomic64_" #g "_cx8"
+#else
+#define ATOMIC64_ALTERNATIVE_(f, g) ALTERNATIVE("call atomic64_" #f "_386", "call atomic64_" #g "_cx8", X86_FEATURE_CX8)
+#endif
+
+#define ATOMIC64_ALTERNATIVE(f) ATOMIC64_ALTERNATIVE_(f, f)
+
+/**
+ * atomic64_cmpxchg - cmpxchg atomic64 variable
+ * @p: pointer to type atomic64_t
+ * @o: expected value
+ * @n: new value
+ *
+ * Atomically sets @v to @n if it was equal to @o and returns
+ * the old value.
+ */
+
+static inline long long atomic64_cmpxchg(atomic64_t *v, long long o, long long n)
+{
+	return cmpxchg64(&v->counter, o, n);
+}
 
 /**
  * atomic64_xchg - xchg atomic64 variable
- * @ptr:      pointer to type atomic64_t
- * @new_val:  value to assign
+ * @v: pointer to type atomic64_t
+ * @n: value to assign
  *
- * Atomically xchgs the value of @ptr to @new_val and returns
+ * Atomically xchgs the value of @v to @n and returns
  * the old value.
  */
-extern u64 atomic64_xchg(atomic64_t *ptr, u64 new_val);
+static inline long long atomic64_xchg(atomic64_t *v, long long n)
+{
+	long long o;
+	unsigned high = (unsigned)(n >> 32);
+	unsigned low = (unsigned)n;
+	asm volatile(ATOMIC64_ALTERNATIVE(xchg)
+		     : "=A" (o), "+b" (low), "+c" (high)
+		     : "S" (v)
+		     : "memory"
+		     );
+	return o;
+}
 
 /**
  * atomic64_set - set atomic64 variable
- * @ptr:      pointer to type atomic64_t
- * @new_val:  value to assign
+ * @v: pointer to type atomic64_t
+ * @n: value to assign
  *
- * Atomically sets the value of @ptr to @new_val.
+ * Atomically sets the value of @v to @n.
 */
-extern void atomic64_set(atomic64_t *ptr, u64 new_val);
+static inline void atomic64_set(atomic64_t *v, long long i)
+{
+	unsigned high = (unsigned)(i >> 32);
+	unsigned low = (unsigned)i;
+	asm volatile(ATOMIC64_ALTERNATIVE(set)
+		     : "+b" (low), "+c" (high)
+		     : "S" (v)
+		     : "eax", "edx", "memory"
+		     );
+}
 
 /**
  * atomic64_read - read atomic64 variable
- * @ptr:      pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically reads the value of @ptr and returns it.
+ * Atomically reads the value of @v and returns it.
  */
-static inline u64 atomic64_read(atomic64_t *ptr)
+static inline long long atomic64_read(atomic64_t *v)
 {
-	u64 res;
-
-	/*
-	 * Note, we inline this atomic64_t primitive because
-	 * it only clobbers EAX/EDX and leaves the others
-	 * untouched. We also (somewhat subtly) rely on the
-	 * fact that cmpxchg8b returns the current 64-bit value
-	 * of the memory location we are touching:
-	 */
-	asm volatile(
-		"mov %%ebx, %%eax\n\t"
-		"mov %%ecx, %%edx\n\t"
-		LOCK_PREFIX "cmpxchg8b %1\n"
-			: "=&A" (res)
-			: "m" (*ptr)
+	long long r;
+	asm volatile(ATOMIC64_ALTERNATIVE(read)
+		     : "=A" (r), "+c" (v)
+		     : : "memory"
 		     );
-
-	return res;
+	return r;
 }
 
-extern u64 atomic64_read(atomic64_t *ptr);
-
 /**
  * atomic64_add_return - add and return
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr and returns @delta + *@ptr
+ * Atomically adds @i to @v and returns @i + *@v
 */
-extern u64 atomic64_add_return(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_add_return(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE(add_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /*
  * Other variants with different arithmetic operators:
 */
-extern u64 atomic64_sub_return(u64 delta, atomic64_t *ptr);
-extern u64 atomic64_inc_return(atomic64_t *ptr);
-extern u64 atomic64_dec_return(atomic64_t *ptr);
+static inline long long atomic64_sub_return(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE(sub_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
+
+static inline long long atomic64_inc_return(atomic64_t *v)
+{
+	long long a;
+	asm volatile(ATOMIC64_ALTERNATIVE(inc_return)
+		     : "=A" (a)
+		     : "S" (v)
+		     : "memory", "ecx"
+		     );
+	return a;
+}
+
+static inline long long atomic64_dec_return(atomic64_t *v)
+{
+	long long a;
+	asm volatile(ATOMIC64_ALTERNATIVE(dec_return)
+		     : "=A" (a)
+		     : "S" (v)
+		     : "memory", "ecx"
+		     );
+	return a;
+}
 
 /**
  * atomic64_add - add integer to atomic64 variable
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr.
 + * Atomically adds @i to @v.
 */
-extern void atomic64_add(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_add(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(add, add_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /**
  * atomic64_sub - subtract the atomic64 variable
- * @delta: integer value to subtract
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
  *
- * Atomically subtracts @delta from @ptr.
+ * Atomically subtracts @i from @v.
 */
-extern void atomic64_sub(u64 delta, atomic64_t *ptr);
+static inline long long atomic64_sub(long long i, atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(sub, sub_return)
+		     : "+A" (i), "+c" (v)
+		     : : "memory"
+		     );
+	return i;
+}
 
 /**
  * atomic64_sub_and_test - subtract value from variable and test result
- * @delta: integer value to subtract
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to subtract
+ * @v: pointer to type atomic64_t
  *
- * Atomically subtracts @delta from @ptr and returns
+ * Atomically subtracts @i from @v and returns
  * true if the result is zero, or false for all
  * other cases.
 */
-extern int atomic64_sub_and_test(u64 delta, atomic64_t *ptr);
+static inline int atomic64_sub_and_test(long long i, atomic64_t *v)
+{
+	return atomic64_sub_return(i, v) == 0;
+}
 
 /**
  * atomic64_inc - increment atomic64 variable
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically increments @ptr by 1.
+ * Atomically increments @v by 1.
 */
-extern void atomic64_inc(atomic64_t *ptr);
+static inline void atomic64_inc(atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(inc, inc_return)
+		     : : "S" (v)
+		     : "memory", "eax", "ecx", "edx"
+		     );
+}
 
 /**
  * atomic64_dec - decrement atomic64 variable
@@ -124,37 +208,97 @@ extern void atomic64_inc(atomic64_t *ptr);
  *
  * Atomically decrements @ptr by 1.
 */
-extern void atomic64_dec(atomic64_t *ptr);
+static inline void atomic64_dec(atomic64_t *v)
+{
+	asm volatile(ATOMIC64_ALTERNATIVE_(dec, dec_return)
+		     : : "S" (v)
+		     : "memory", "eax", "ecx", "edx"
+		     );
+}
 
 /**
  * atomic64_dec_and_test - decrement and test
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically decrements @ptr by 1 and
+ * Atomically decrements @v by 1 and
  * returns true if the result is 0, or false for all other
  * cases.
 */
-extern int atomic64_dec_and_test(atomic64_t *ptr);
+static inline int atomic64_dec_and_test(atomic64_t *v)
+{
+	return atomic64_dec_return(v) == 0;
+}
 
 /**
  * atomic64_inc_and_test - increment and test
- * @ptr: pointer to type atomic64_t
+ * @v: pointer to type atomic64_t
  *
- * Atomically increments @ptr by 1
+ * Atomically increments @v by 1
  * and returns true if the result is zero, or false for all
  * other cases.
 */
-extern int atomic64_inc_and_test(atomic64_t *ptr);
+static inline int atomic64_inc_and_test(atomic64_t *v)
+{
+	return atomic64_inc_return(v) == 0;
+}
 
 /**
  * atomic64_add_negative - add and test if negative
- * @delta: integer value to add
- * @ptr:   pointer to type atomic64_t
+ * @i: integer value to add
+ * @v: pointer to type atomic64_t
  *
- * Atomically adds @delta to @ptr and returns true
+ * Atomically adds @i to @v and returns true
  * if the result is negative, or false when
  * result is greater than or equal to zero.
 */
-extern int atomic64_add_negative(u64 delta, atomic64_t *ptr);
+static inline int atomic64_add_negative(long long i, atomic64_t *v)
+{
+	return atomic64_add_return(i, v) < 0;
+}
 
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns non-zero if @v was not @u, and zero otherwise.
+ */
+static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
+{
+	unsigned low = (unsigned)u;
+	unsigned high = (unsigned)(u >> 32);
+	asm volatile(ATOMIC64_ALTERNATIVE(add_unless) "\n\t"
+		     : "+A" (a), "+c" (v), "+S" (low), "+D" (high)
+		     : : "memory");
+	return (int)a;
+}
+
+
+static inline int atomic64_inc_not_zero(atomic64_t *v)
+{
+	int r;
+	asm volatile(ATOMIC64_ALTERNATIVE(inc_not_zero)
+		     : "=a" (r)
+		     : "S" (v)
+		     : "ecx", "edx", "memory"
+		     );
+	return r;
+}
+
+static inline long long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long long r;
+	asm volatile(ATOMIC64_ALTERNATIVE(dec_if_positive)
+		     : "=A" (r)
+		     : "S" (v)
+		     : "ecx", "memory"
+		     );
+	return r;
+}
+
+#undef ATOMIC64_ALTERNATIVE
+#undef ATOMIC64_ALTERNATIVE_
+
 #endif /* _ASM_X86_ATOMIC64_32_H */
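
Note that the atomic64_*_386 and atomic64_*_cx8 helpers called above do not use the C calling convention: the atomic64_t pointer travels in a fixed register (%esi or %ecx, per the constraints) and the 64-bit operand in %edx:%eax (the "A" constraint), which is why every wrapper is inline asm around a call instruction. Which helper that call reaches is decided once at boot: ALTERNATIVE() tests X86_FEATURE_CX8, i.e. CPUID leaf 1, EDX bit 8, which advertises cmpxchg8b. A user-space sketch of the same feature test (illustrative only; the kernel patches the call sites instead of branching):

#include <stdio.h>
#include <cpuid.h>

int main(void)
{
	unsigned int eax, ebx, ecx, edx;

	if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
		return 1;
	/* CPUID.01H:EDX bit 8 is the CX8 (cmpxchg8b) feature flag. */
	printf("cmpxchg8b %s\n", (edx & (1u << 8)) ? "supported" : "absent");
	return 0;
}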
arch/x86/include/asm/atomic64_64.h  +23 −0
@@ -221,4 +221,27 @@ static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long c, old, dec;
+	c = atomic64_read(v);
+	for (;;) {
+		dec = c - 1;
+		if (unlikely(dec < 0))
+			break;
+		old = atomic64_cmpxchg((v), c, dec);
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return dec;
+}
+
 #endif /* _ASM_X86_ATOMIC64_64_H */
arch/x86/include/asm/cmpxchg_32.h  +2 −1
@@ -271,7 +271,8 @@ extern unsigned long long cmpxchg_486_u64(volatile void *, u64, u64);
 	__typeof__(*(ptr)) __ret;				\
 	__typeof__(*(ptr)) __old = (o);				\
 	__typeof__(*(ptr)) __new = (n);				\
-	alternative_io("call cmpxchg8b_emu",			\
+	alternative_io(LOCK_PREFIX_HERE				\
+			"call cmpxchg8b_emu",			\
 		       "lock; cmpxchg8b (%%esi)" ,		\
 		       X86_FEATURE_CX8,				\
 		       "=A" (__ret),				\