
Commit 25547b6b authored by Ingo Molnar

Merge branch 'WIP.locking/atomics' into locking/core



Merge two uncontroversial cleanups from this branch while the rest is being reworked.

Signed-off-by: Ingo Molnar <mingo@kernel.org>
parents 520eccdf 007d185b
arch/x86/include/asm/atomic.h  +46 −23
@@ -197,35 +197,56 @@ static inline int atomic_xchg(atomic_t *v, int new)
 	return xchg(&v->counter, new);
 }
 
-#define ATOMIC_OP(op)							\
-static inline void atomic_##op(int i, atomic_t *v)			\
-{									\
-	asm volatile(LOCK_PREFIX #op"l %1,%0"				\
-			: "+m" (v->counter)				\
-			: "ir" (i)					\
-			: "memory");					\
-}
-
-#define ATOMIC_FETCH_OP(op, c_op)					\
-static inline int atomic_fetch_##op(int i, atomic_t *v)			\
-{									\
-	int val = atomic_read(v);					\
-	do {								\
-	} while (!atomic_try_cmpxchg(v, &val, val c_op i));		\
-	return val;							\
-}
-
-#define ATOMIC_OPS(op, c_op)						\
-	ATOMIC_OP(op)							\
-	ATOMIC_FETCH_OP(op, c_op)
-
-ATOMIC_OPS(and, &)
-ATOMIC_OPS(or , |)
-ATOMIC_OPS(xor, ^)
-
-#undef ATOMIC_OPS
-#undef ATOMIC_FETCH_OP
-#undef ATOMIC_OP
+static inline void atomic_and(int i, atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "andl %1,%0"
+			: "+m" (v->counter)
+			: "ir" (i)
+			: "memory");
+}
+
+static inline int atomic_fetch_and(int i, atomic_t *v)
+{
+	int val = atomic_read(v);
+
+	do { } while (!atomic_try_cmpxchg(v, &val, val & i));
+
+	return val;
+}
+
+static inline void atomic_or(int i, atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "orl %1,%0"
+			: "+m" (v->counter)
+			: "ir" (i)
+			: "memory");
+}
+
+static inline int atomic_fetch_or(int i, atomic_t *v)
+{
+	int val = atomic_read(v);
+
+	do { } while (!atomic_try_cmpxchg(v, &val, val | i));
+
+	return val;
+}
+
+static inline void atomic_xor(int i, atomic_t *v)
+{
+	asm volatile(LOCK_PREFIX "xorl %1,%0"
+			: "+m" (v->counter)
+			: "ir" (i)
+			: "memory");
+}
+
+static inline int atomic_fetch_xor(int i, atomic_t *v)
+{
+	int val = atomic_read(v);
+
+	do { } while (!atomic_try_cmpxchg(v, &val, val ^ i));
+
+	return val;
+}
 
 /**
  * __atomic_add_unless - add unless the number is already a given value
@@ -239,10 +260,12 @@ ATOMIC_OPS(xor, ^)
 static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c = atomic_read(v);
+
 	do {
 		if (unlikely(c == u))
 			break;
 	} while (!atomic_try_cmpxchg(v, &c, c + a));
+
 	return c;
 }
 
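Every new atomic_fetch_*() helper above follows the same shape: read the counter once, then retry atomic_try_cmpxchg() until the update lands, relying on try_cmpxchg() to refresh the expected value on each failure. A minimal user-space sketch of that pattern, with the GCC/Clang __atomic builtins standing in for the kernel's atomic_read()/atomic_try_cmpxchg() (fetch_or() here is an illustrative name, not kernel API):

#include <stdio.h>

/* User-space analogue of the kernel's atomic_fetch_or() loop:
 * on failure, __atomic_compare_exchange_n() writes the value it
 * observed back into 'val', just as atomic_try_cmpxchg() updates
 * its 'old' argument, so the loop body can stay empty. */
static int fetch_or(int *counter, int i)
{
	int val = __atomic_load_n(counter, __ATOMIC_RELAXED);

	do { } while (!__atomic_compare_exchange_n(counter, &val, val | i,
						   0, __ATOMIC_SEQ_CST,
						   __ATOMIC_RELAXED));

	return val;	/* value seen just before the OR took effect */
}

int main(void)
{
	int counter = 0x3;
	int old = fetch_or(&counter, 0x4);

	printf("old=%#x new=%#x\n", old, counter);	/* old=0x3 new=0x7 */
	return 0;
}

The empty loop body is the point of the try_cmpxchg() API: a failing compare-exchange already hands back the fresh value, so nothing needs to be re-read at the top of the loop.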
arch/x86/include/asm/atomic64_32.h  +57 −24
@@ -312,37 +312,70 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
 #undef alternative_atomic64
 #undef __alternative_atomic64
 
-#define ATOMIC64_OP(op, c_op)						\
-static inline void atomic64_##op(long long i, atomic64_t *v)		\
-{									\
-	long long old, c = 0;						\
-	while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)		\
-		c = old;						\
-}
-
-#define ATOMIC64_FETCH_OP(op, c_op)					\
-static inline long long atomic64_fetch_##op(long long i, atomic64_t *v)	\
-{									\
-	long long old, c = 0;						\
-	while ((old = atomic64_cmpxchg(v, c, c c_op i)) != c)		\
-		c = old;						\
-	return old;							\
-}
-
-ATOMIC64_FETCH_OP(add, +)
-
-#define atomic64_fetch_sub(i, v)	atomic64_fetch_add(-(i), (v))
-
-#define ATOMIC64_OPS(op, c_op)						\
-	ATOMIC64_OP(op, c_op)						\
-	ATOMIC64_FETCH_OP(op, c_op)
-
-ATOMIC64_OPS(and, &)
-ATOMIC64_OPS(or, |)
-ATOMIC64_OPS(xor, ^)
-
-#undef ATOMIC64_OPS
-#undef ATOMIC64_FETCH_OP
-#undef ATOMIC64_OP
+static inline void atomic64_and(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c & i)) != c)
+		c = old;
+}
+
+static inline long long atomic64_fetch_and(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c & i)) != c)
+		c = old;
+
+	return old;
+}
+
+static inline void atomic64_or(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c | i)) != c)
+		c = old;
+}
+
+static inline long long atomic64_fetch_or(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c | i)) != c)
+		c = old;
+
+	return old;
+}
+
+static inline void atomic64_xor(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c ^ i)) != c)
+		c = old;
+}
+
+static inline long long atomic64_fetch_xor(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c ^ i)) != c)
+		c = old;
+
+	return old;
+}
+
+static inline long long atomic64_fetch_add(long long i, atomic64_t *v)
+{
+	long long old, c = 0;
+
+	while ((old = atomic64_cmpxchg(v, c, c + i)) != c)
+		c = old;
+
+	return old;
+}
+
+#define atomic64_fetch_sub(i, v)	atomic64_fetch_add(-(i), (v))
 
 #endif /* _ASM_X86_ATOMIC64_32_H */
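On 32-bit x86 there is no 64-bit LOCK-prefixed arithmetic to open-code, so every helper above is a loop around atomic64_cmpxchg() (ultimately cmpxchg8b): start from the guess c = 0 and let each failed compare-exchange hand back the value actually in memory. A rough user-space sketch of the same idea, with __atomic_compare_exchange_n() standing in for atomic64_cmpxchg() and fetch_add64() being a made-up name:

#include <stdio.h>

/* Sketch of the atomic64_32.h loop shape. The builtin updates 'c'
 * in place on failure, playing the role of both 'old' and 'c' in
 * the kernel's:
 *	while ((old = atomic64_cmpxchg(v, c, c + i)) != c)
 *		c = old; */
static long long fetch_add64(long long *counter, long long i)
{
	long long c = 0;	/* deliberate first guess, as in the kernel */

	while (!__atomic_compare_exchange_n(counter, &c, c + i, 0,
					    __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
		;	/* 'c' now holds the value the cmpxchg observed */

	return c;	/* value before the add */
}

int main(void)
{
	long long counter = 40;
	long long old = fetch_add64(&counter, 2);

	printf("old=%lld new=%lld\n", old, counter);	/* old=40 new=42 */
	return 0;
}

Starting from 0 rather than a plain load is harmless: the first cmpxchg almost always fails, and its only side effect is to fetch the real current value, after which the loop is an ordinary read-modify-retry.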
arch/x86/include/asm/atomic64_64.h  +47 −26
@@ -177,7 +177,7 @@ static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
 }
 
 #define atomic64_try_cmpxchg atomic64_try_cmpxchg
-static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, long *old, long new)
+static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, long new)
 {
 	return try_cmpxchg(&v->counter, old, new);
 }
@@ -198,7 +198,7 @@ static inline long atomic64_xchg(atomic64_t *v, long new)
  */
 static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
 {
-	long c = atomic64_read(v);
+	s64 c = atomic64_read(v);
 	do {
 		if (unlikely(c == u))
 			return false;
@@ -217,7 +217,7 @@ static inline bool atomic64_add_unless(atomic64_t *v, long a, long u)
  */
 static inline long atomic64_dec_if_positive(atomic64_t *v)
 {
-	long dec, c = atomic64_read(v);
+	s64 dec, c = atomic64_read(v);
 	do {
 		dec = c - 1;
 		if (unlikely(dec < 0))
@@ -226,34 +226,55 @@ static inline long atomic64_dec_if_positive(atomic64_t *v)
 	return dec;
 }
 
-#define ATOMIC64_OP(op)							\
-static inline void atomic64_##op(long i, atomic64_t *v)			\
-{									\
-	asm volatile(LOCK_PREFIX #op"q %1,%0"				\
-			: "+m" (v->counter)				\
-			: "er" (i)					\
-			: "memory");					\
-}
-
-#define ATOMIC64_FETCH_OP(op, c_op)					\
-static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
-{									\
-	long val = atomic64_read(v);					\
-	do {								\
-	} while (!atomic64_try_cmpxchg(v, &val, val c_op i));		\
-	return val;							\
-}
-
-#define ATOMIC64_OPS(op, c_op)						\
-	ATOMIC64_OP(op)							\
-	ATOMIC64_FETCH_OP(op, c_op)
-
-ATOMIC64_OPS(and, &)
-ATOMIC64_OPS(or, |)
-ATOMIC64_OPS(xor, ^)
-
-#undef ATOMIC64_OPS
-#undef ATOMIC64_FETCH_OP
-#undef ATOMIC64_OP
+static inline void atomic64_and(long i, atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "andq %1,%0"
+			: "+m" (v->counter)
+			: "er" (i)
+			: "memory");
+}
+
+static inline long atomic64_fetch_and(long i, atomic64_t *v)
+{
+	s64 val = atomic64_read(v);
+
+	do {
+	} while (!atomic64_try_cmpxchg(v, &val, val & i));
+	return val;
+}
+
+static inline void atomic64_or(long i, atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "orq %1,%0"
+			: "+m" (v->counter)
+			: "er" (i)
+			: "memory");
+}
+
+static inline long atomic64_fetch_or(long i, atomic64_t *v)
+{
+	s64 val = atomic64_read(v);
+
+	do {
+	} while (!atomic64_try_cmpxchg(v, &val, val | i));
+	return val;
+}
+
+static inline void atomic64_xor(long i, atomic64_t *v)
+{
+	asm volatile(LOCK_PREFIX "xorq %1,%0"
+			: "+m" (v->counter)
+			: "er" (i)
+			: "memory");
+}
+
+static inline long atomic64_fetch_xor(long i, atomic64_t *v)
+{
+	s64 val = atomic64_read(v);
+
+	do {
+	} while (!atomic64_try_cmpxchg(v, &val, val ^ i));
+	return val;
+}
 
 #endif /* _ASM_X86_ATOMIC64_64_H */
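Note that the non-fetch variants above (atomic64_and/or/xor) need no loop at all: with no return value required, each compiles to a single LOCK-prefixed read-modify-write instruction. A user-space approximation of the same inline asm, hard-coding the lock prefix where the kernel goes through its LOCK_PREFIX alternatives machinery (x86-64 only; atomic64_or_demo is an illustrative name):

#include <stdio.h>

/* "+m" makes the counter an in/out memory operand, "er" allows a
 * register or a sign-extended 32-bit immediate, and the "memory"
 * clobber keeps the compiler from reordering accesses around it. */
static inline void atomic64_or_demo(long i, long *counter)
{
	asm volatile("lock orq %1,%0"
			: "+m" (*counter)
			: "er" (i)
			: "memory");
}

int main(void)
{
	long counter = 0x1;

	atomic64_or_demo(0x6, &counter);
	printf("counter=%#lx\n", counter);	/* counter=0x7 */
	return 0;
}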
arch/x86/include/asm/cmpxchg.h  +1 −1
@@ -157,7 +157,7 @@ extern void __add_wrong_size(void)
 #define __raw_try_cmpxchg(_ptr, _pold, _new, size, lock)		\
 ({									\
 	bool success;							\
-	__typeof__(_ptr) _old = (_pold);				\
+	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);		\
 	__typeof__(*(_ptr)) __old = *_old;				\
 	__typeof__(*(_ptr)) __new = (_new);				\
 	switch (size) {							\
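This one-line change is forced by the s64 conversion above: atomic64_try_cmpxchg() now passes 'old' as an s64 * while v->counter is a long, and without the explicit conversion the _old assignment mixes 'long *' and 'long long *', which are incompatible pointer types to the compiler even though both point at 64-bit objects on x86-64. A toy reduction showing the same cast at work (try_cmpxchg_demo is a made-up name, and the builtin is a stand-in; the real macro dispatches on operand size to a LOCK-prefixed cmpxchg instruction):

#include <stdbool.h>
#include <stdio.h>

typedef long long s64;

#define try_cmpxchg_demo(_ptr, _pold, _new)				\
({									\
	bool success;							\
	__typeof__(_ptr) _old = (__typeof__(_ptr))(_pold);		\
	__typeof__(*(_ptr)) __old = *_old;				\
	success = __atomic_compare_exchange_n((_ptr), &__old, (_new),	\
					      0, __ATOMIC_SEQ_CST,	\
					      __ATOMIC_RELAXED);	\
	if (!success)							\
		*_old = __old;	/* report the observed value back */	\
	success;							\
})

int main(void)
{
	long counter = 5;
	s64 old = 5;	/* a 'long long *' is passed where 'long *' is used */

	/* Without the cast inside the macro, GCC rejects this mix of
	 * pointer types under -Werror=incompatible-pointer-types. */
	bool ok = try_cmpxchg_demo(&counter, &old, 6L);
	printf("ok=%d counter=%ld old=%lld\n", ok, counter, old);
	return 0;
}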