Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 304a0d69 authored by Peter Zijlstra, committed by Thomas Gleixner
Browse files

sparc: Provide atomic_{or,xor,and}



Implement atomic logic ops -- atomic_{or,xor,and}.

These will replace the atomic_{set,clear}_mask functions that are
available on some archs.

Acked-by: David S. Miller <davem@davemloft.net>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent 658aa514
Loading
Loading
Loading
Loading
+4 −0
Original line number Diff line number Diff line
@@ -17,10 +17,14 @@
#include <asm/barrier.h>
#include <asm-generic/atomic64.h>

#define CONFIG_ARCH_HAS_ATOMIC_OR

#define ATOMIC_INIT(i)  { (i) }

int atomic_add_return(int, atomic_t *);
void atomic_and(int, atomic_t *);
void atomic_or(int, atomic_t *);
void atomic_xor(int, atomic_t *);
int atomic_cmpxchg(atomic_t *, int, int);
int atomic_xchg(atomic_t *, int);
int __atomic_add_unless(atomic_t *, int, int);
+6 −0
Original line number Diff line number Diff line
@@ -33,6 +33,12 @@ long atomic64_##op##_return(long, atomic64_t *);
ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#define CONFIG_ARCH_HAS_ATOMIC_OR

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
+19 −3
Original line number Diff line number Diff line
@@ -27,22 +27,38 @@ static DEFINE_SPINLOCK(dummy);

#endif /* SMP */

/*
 * ATOMIC_OP_RETURN(op, c_op) - emit int atomic_<op>_return(int i, atomic_t *v).
 *
 * Applies "v->counter c_op i" while holding the spinlock selected by
 * ATOMIC_HASH(v) with IRQs disabled, and returns the updated counter
 * value.  (On !SMP builds the hash collapses to the single `dummy`
 * lock declared above -- see the DEFINE_SPINLOCK(dummy) context.)
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);

/*
 * ATOMIC_OP(op, c_op) - emit void atomic_<op>(int i, atomic_t *v).
 *
 * Applies "v->counter c_op i" while holding the spinlock selected by
 * ATOMIC_HASH(v) with IRQs disabled.  Unlike ATOMIC_OP_RETURN, no
 * result is read back or returned.
 */
#define ATOMIC_OP(op, c_op)						\
void atomic_##op(int i, atomic_t *v)					\
{									\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
}									\
EXPORT_SYMBOL(atomic_##op);

/*
 * Instantiate the spinlocked ops: add keeps its value-returning form;
 * and/or/xor are void (logic ops need no *_return variant here).
 * The generator macros are undefined afterwards to keep them file-local.
 */
ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)

#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

int atomic_xchg(atomic_t *v, int new)
+6 −0
Original line number Diff line number Diff line
@@ -47,6 +47,9 @@ ENDPROC(atomic_##op##_return);

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
@@ -84,6 +87,9 @@ ENDPROC(atomic64_##op##_return);

ATOMIC64_OPS(add)
ATOMIC64_OPS(sub)
ATOMIC64_OP(and)
ATOMIC64_OP(or)
ATOMIC64_OP(xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
+3 −0
Original line number Diff line number Diff line
@@ -111,6 +111,9 @@ EXPORT_SYMBOL(atomic64_##op##_return);

ATOMIC_OPS(add)
ATOMIC_OPS(sub)
ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN