
Commit 4a6dae6d authored by Nick Piggin, committed by Linus Torvalds

[PATCH] atomic: cmpxchg



Introduce an atomic_cmpxchg operation.

Signed-off-by: Nick Piggin <npiggin@suse.de>
Cc: "Paul E. McKenney" <paulmck@us.ibm.com>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent 53e86b91
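
For readers coming to the primitive cold, here is a minimal caller-side sketch of the contract being introduced (illustrative only, not part of the patch; the name try_claim is invented): atomic_cmpxchg() returns the value the counter held at the time of the operation, and the new value is stored only if that observed value equalled the expected old value.

	/* Illustration, not from this patch: claim a flag word by swapping 0 -> 1. */
	static atomic_t flag = ATOMIC_INIT(0);

	static int try_claim(void)
	{
		/* Returns the prior value of the counter; 0 means our store won. */
		return atomic_cmpxchg(&flag, 0, 1) == 0;
	}
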
+15 −0
@@ -115,6 +115,21 @@ boolean is return which indicates whether the resulting counter value
is negative.  It requires explicit memory barrier semantics around the
operation.

Finally:

	int atomic_cmpxchg(atomic_t *v, int old, int new);

This performs an atomic compare exchange operation on the atomic value v,
with the given old and new values. Like all atomic_xxx operations,
atomic_cmpxchg will only satisfy its atomicity semantics as long as all
other accesses of *v are performed through atomic_xxx operations.

atomic_cmpxchg requires explicit memory barriers around the operation.

The semantics for atomic_cmpxchg are the same as those defined for 'cas'
below.


If a caller requires memory barrier semantics around an atomic_t
operation which does not return a value, a set of interfaces are
defined which accomplish this:
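
To make the semantics documented above concrete, here is a usage sketch (not part of the patch): a typical compare-and-exchange retry loop that adds i to v only while the observed value stays non-negative. The helper name add_if_nonnegative is invented for the example; note that every access to *v goes through an atomic_xxx operation, as the text above requires.

	/* Usage sketch only, not part of this patch. */
	static inline int add_if_nonnegative(atomic_t *v, int i)
	{
		int old, cur;

		cur = atomic_read(v);
		for (;;) {
			if (cur < 0)
				return 0;	/* observed value is negative: give up */
			old = cur;
			cur = atomic_cmpxchg(v, old, old + i);
			if (cur == old)
				return 1;	/* exchange succeeded */
			/* another CPU changed *v; retry with the value just returned */
		}
	}
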
+16 −5
@@ -37,17 +37,28 @@ int __atomic_add_return(int i, atomic_t *v)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 	return ret;
 }
+EXPORT_SYMBOL(__atomic_add_return);
 
-void atomic_set(atomic_t *v, int i)
+int atomic_cmpxchg(atomic_t *v, int old, int new)
 {
+	int ret;
 	unsigned long flags;
-	spin_lock_irqsave(ATOMIC_HASH(v), flags);
 
-	v->counter = i;
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);
+	ret = v->counter;
+	if (likely(ret == old))
+		v->counter = new;
 
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
+	return ret;
 }
 
-EXPORT_SYMBOL(__atomic_add_return);
-EXPORT_SYMBOL(atomic_set);
+void atomic_set(atomic_t *v, int i)
+{
+	unsigned long flags;
 
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);
+	v->counter = i;
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
+}
+EXPORT_SYMBOL(atomic_set);
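
A short aside on the hashed-spinlock implementation above (illustration only, not from the patch): because every operation serializes through the spinlock selected by ATOMIC_HASH(v), the atomicity guarantee holds only when all accesses to the counter go through the atomic_xxx interfaces, exactly as the documentation hunk states. A raw store to v->counter would bypass that lock and could race with a concurrent atomic_cmpxchg().

	/* Illustration only, not part of the patch. */
	static atomic_t refs = ATOMIC_INIT(1);

	static void reset_refs(void)
	{
		/* refs.counter = 0;     WRONG here: bypasses ATOMIC_HASH(&refs) */
		atomic_set(&refs, 0);	/* correct: takes the same lock as atomic_cmpxchg() */
	}
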
+2 −0
@@ -177,6 +177,8 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
	return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))

+31 −0
@@ -80,6 +80,23 @@ static inline int atomic_sub_return(int i, atomic_t *v)
	return result;
}

static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
{
	u32 oldval, res;

	do {
		__asm__ __volatile__("@ atomic_cmpxchg\n"
		"ldrex	%1, [%2]\n"
		"teq	%1, %3\n"
		"strexeq %0, %4, [%2]\n"
		    : "=&r" (res), "=&r" (oldval)
		    : "r" (&ptr->counter), "Ir" (old), "r" (new)
		    : "cc");
	} while (res);

	return oldval;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long tmp, tmp2;
@@ -131,6 +148,20 @@ static inline int atomic_sub_return(int i, atomic_t *v)
	return val;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;
+14 −0
@@ -62,6 +62,20 @@ static inline int atomic_sub_return(int i, atomic_t *v)
        return val;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
        unsigned long flags;