Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit f52609fd authored by Ingo Molnar
Browse files

Merge branch 'locking/arch-atomic' into locking/core, because it's ready for upstream



Signed-off-by: Ingo Molnar <mingo@kernel.org>
parents 20f9ed15 41b9e9fc
Loading
Loading
Loading
Loading
+27 −15
Original line number Diff line number Diff line
@@ -29,13 +29,13 @@
 * branch back to restart the operation.
 */

#define ATOMIC_OP(op)							\
#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int i, atomic_t * v)			\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #op "l %0,%2,%0\n"					\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
@@ -45,15 +45,15 @@ static __inline__ void atomic_##op(int i, atomic_t * v) \
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC_OP_RETURN(op)						\
#define ATOMIC_OP_RETURN(op, asm_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldl_l %0,%1\n"						\
	"	" #op "l %0,%3,%2\n"					\
	"	" #op "l %0,%3,%0\n"					\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stl_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
@@ -65,13 +65,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
	return result;							\
}

#define ATOMIC64_OP(op)							\
#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long i, atomic64_t * v)		\
{									\
	unsigned long temp;						\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #op "q %0,%2,%0\n"					\
	"	" #asm_op " %0,%2,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
@@ -81,15 +81,15 @@ static __inline__ void atomic64_##op(long i, atomic64_t * v) \
	:"Ir" (i), "m" (v->counter));					\
}									\

#define ATOMIC64_OP_RETURN(op)						\
#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)	\
{									\
	long temp, result;						\
	smp_mb();							\
	__asm__ __volatile__(						\
	"1:	ldq_l %0,%1\n"						\
	"	" #op "q %0,%3,%2\n"					\
	"	" #op "q %0,%3,%0\n"					\
	"	" #asm_op " %0,%3,%2\n"					\
	"	" #asm_op " %0,%3,%0\n"					\
	"	stq_c %0,%1\n"						\
	"	beq %0,2f\n"						\
	".subsection 2\n"						\
@@ -101,15 +101,27 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v) \
	return result;							\
}

#define ATOMIC_OPS(opg)							\
	ATOMIC_OP(opg)							\
	ATOMIC_OP_RETURN(opg)						\
	ATOMIC64_OP(opg)						\
	ATOMIC64_OP_RETURN(opg)
#define ATOMIC_OPS(op)							\
	ATOMIC_OP(op, op##l)						\
	ATOMIC_OP_RETURN(op, op##l)					\
	ATOMIC64_OP(op, op##q)						\
	ATOMIC64_OP_RETURN(op, op##q)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define atomic_andnot atomic_andnot
#define atomic64_andnot atomic64_andnot

ATOMIC_OP(and, and)
ATOMIC_OP(andnot, bic)
ATOMIC_OP(or, bis)
ATOMIC_OP(xor, xor)
ATOMIC64_OP(and, and)
ATOMIC64_OP(andnot, bic)
ATOMIC64_OP(or, bis)
ATOMIC64_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
+6 −2
Original line number Diff line number Diff line
@@ -143,9 +143,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \

ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)
ATOMIC_OP(and, &=, and)

#define atomic_clear_mask(mask, v) atomic_and(~(mask), (v))
#define atomic_andnot atomic_andnot

ATOMIC_OP(and, &=, and)
ATOMIC_OP(andnot, &= ~, bic)
ATOMIC_OP(or, |=, or)
ATOMIC_OP(xor, ^=, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
+14 −0
Original line number Diff line number Diff line
@@ -194,6 +194,13 @@ static inline int __atomic_add_unless(atomic_t *v, int a, int u)
ATOMIC_OPS(add, +=, add)
ATOMIC_OPS(sub, -=, sub)

#define atomic_andnot atomic_andnot

ATOMIC_OP(and, &=, and)
ATOMIC_OP(andnot, &= ~, bic)
ATOMIC_OP(or,  |=, orr)
ATOMIC_OP(xor, ^=, eor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
@@ -321,6 +328,13 @@ static inline long long atomic64_##op##_return(long long i, atomic64_t *v) \
ATOMIC64_OPS(add, adds, adc)
ATOMIC64_OPS(sub, subs, sbc)

#define atomic64_andnot atomic64_andnot

ATOMIC64_OP(and, and, and)
ATOMIC64_OP(andnot, bic, bic)
ATOMIC64_OP(or,  orr, orr)
ATOMIC64_OP(xor, eor, eor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
+14 −0
Original line number Diff line number Diff line
@@ -85,6 +85,13 @@ static inline int atomic_##op##_return(int i, atomic_t *v) \
ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, sub)

#define atomic_andnot atomic_andnot

ATOMIC_OP(and, and)
ATOMIC_OP(andnot, bic)
ATOMIC_OP(or, orr)
ATOMIC_OP(xor, eor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
@@ -183,6 +190,13 @@ static inline long atomic64_##op##_return(long i, atomic64_t *v) \
ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, sub)

#define atomic64_andnot atomic64_andnot

ATOMIC64_OP(and, and)
ATOMIC64_OP(andnot, bic)
ATOMIC64_OP(or, orr)
ATOMIC64_OP(xor, eor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP
+12 −0
Original line number Diff line number Diff line
@@ -44,6 +44,18 @@ static inline int __atomic_##op##_return(int i, atomic_t *v) \
ATOMIC_OP_RETURN(sub, sub, rKs21)
ATOMIC_OP_RETURN(add, add, r)

#define ATOMIC_OP(op, asm_op)						\
ATOMIC_OP_RETURN(op, asm_op, r)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	(void)__atomic_##op##_return(i, v);				\
}

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, eor)

#undef ATOMIC_OP
#undef ATOMIC_OP_RETURN

/*
Loading