
Commit f24219b4 authored by Arun Sharma, committed by Linus Torvalds

atomic: move atomic_add_unless to generic code



This is in preparation for more generic atomic primitives based on
__atomic_add_unless.
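
For context, the follow-up generic code is expected to wrap this per-architecture hook rather than expose it directly. A minimal sketch of such a wrapper (the exact name and its placement in include/linux/atomic.h are assumptions, not part of this commit):

	/*
	 * Generic wrapper: __atomic_add_unless() now returns the old value,
	 * and the boolean result callers historically relied on is recovered
	 * by comparing that old value against @u.
	 */
	static inline int atomic_add_unless(atomic_t *v, int a, int u)
	{
		return __atomic_add_unless(v, a, u) != u;
	}

A typical caller such as atomic_inc_not_zero(v), conventionally defined as atomic_add_unless((v), 1, 0), keeps its existing semantics through such a wrapper.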

Signed-off-by: Arun Sharma <asharma@fb.com>
Signed-off-by: Hans-Christian Egtvedt <hans-christian.egtvedt@atmel.com>
Reviewed-by: Eric Dumazet <eric.dumazet@gmail.com>
Cc: Ingo Molnar <mingo@elte.hu>
Cc: David Miller <davem@davemloft.net>
Acked-by: Mike Frysinger <vapier@gentoo.org>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent 60063497
+5 −5
@@ -176,15 +176,15 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
 /**
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
-static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 	c = atomic_read(v);
@@ -196,7 +196,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
 			break;
 		c = old;
 	}
-	return c != (u);
+	return c;
 }
 
 
@@ -207,7 +207,7 @@ static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
  * @u: ...unless v is equal to u.
  *
  * Atomically adds @a to @v, so long as it was not @u.
- * Returns non-zero if @v was not @u, and zero otherwise.
+ * Returns the old value of @v.
  */
 static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 {
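
The hunks above show only the tail of the compare-and-swap retry loop; the full routine follows the usual cmpxchg pattern, roughly as sketched below (a paraphrase for readability, not the verbatim file contents):

	static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
	{
		int c, old;
		c = atomic_read(v);
		for (;;) {
			if (unlikely(c == (u)))
				break;			/* hit the excluded value: do not add */
			old = atomic_cmpxchg((v), c, c + (a));
			if (likely(old == c))
				break;			/* update landed */
			c = old;			/* lost a race: retry with the fresh value */
		}
		return c;				/* the old value of *v */
	}
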
+2 −2
@@ -208,14 +208,14 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int c, old;
 
 	c = atomic_read(v);
 	while (c != u && (old = atomic_cmpxchg((v), c, c + a)) != c)
 		c = old;
-	return c != u;
+	return c;
 }
 
 #define atomic_inc(v)		atomic_add(1, v)
+25 −32
@@ -78,70 +78,63 @@ static inline int atomic_add_return(int i, atomic_t *v)
 /*
  * atomic_sub_unless - sub unless the number is a given value
  * @v: pointer of type atomic_t
- * @a: the amount to add to v...
+ * @a: the amount to subtract from v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function subtracts a
- * from v, and returns non zero. If v is equal to u then it returns
- * zero. This is done as an atomic operation.
+ * Atomically subtract @a from @v, so long as it was not @u.
+ * Returns the old value of @v.
 */
-static inline int atomic_sub_unless(atomic_t *v, int a, int u)
+static inline void atomic_sub_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result = 0;
+	int tmp;
 
 	asm volatile(
 		"/* atomic_sub_unless */\n"
 		"1:	ssrf	5\n"
-		"	ld.w	%0, %3\n"
-		"	cp.w	%0, %5\n"
+		"	ld.w	%0, %2\n"
+		"	cp.w	%0, %4\n"
 		"	breq	1f\n"
-		"	sub	%0, %4\n"
-		"	stcond	%2, %0\n"
+		"	sub	%0, %3\n"
+		"	stcond	%1, %0\n"
 		"	brne	1b\n"
-		"	mov	%1, 1\n"
 		"1:"
-		: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-		: "m"(v->counter), "rKs21"(a), "rKs21"(u), "1"(result)
+		: "=&r"(tmp), "=o"(v->counter)
+		: "m"(v->counter), "rKs21"(a), "rKs21"(u)
 		: "cc", "memory");
-
-	return result;
 }
 
 /*
- * atomic_add_unless - add unless the number is a given value
+ * __atomic_add_unless - add unless the number is a given value
  * @v: pointer of type atomic_t
  * @a: the amount to add to v...
  * @u: ...unless v is equal to u.
  *
- * If the atomic value v is not equal to u, this function adds a to v,
- * and returns non zero. If v is equal to u then it returns zero. This
- * is done as an atomic operation.
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns the old value of @v.
 */
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
-	int tmp, result;
+	int tmp, old = atomic_read(v);
 
 	if (__builtin_constant_p(a) && (a >= -1048575) && (a <= 1048576))
-		result = atomic_sub_unless(v, -a, u);
+		atomic_sub_unless(v, -a, u);
 	else {
-		result = 0;
 		asm volatile(
-			"/* atomic_add_unless */\n"
+			"/* __atomic_add_unless */\n"
			"1:	ssrf	5\n"
-			"	ld.w	%0, %3\n"
-			"	cp.w	%0, %5\n"
+			"	ld.w	%0, %2\n"
+			"	cp.w	%0, %4\n"
 			"	breq	1f\n"
-			"	add	%0, %4\n"
-			"	stcond	%2, %0\n"
+			"	add	%0, %3\n"
+			"	stcond	%1, %0\n"
 			"	brne	1b\n"
-			"	mov	%1, 1\n"
 			"1:"
-			: "=&r"(tmp), "=&r"(result), "=o"(v->counter)
-			: "m"(v->counter), "r"(a), "ir"(u), "1"(result)
+			: "=&r"(tmp), "=o"(v->counter)
+			: "m"(v->counter), "r"(a), "ir"(u)
			: "cc", "memory");
 	}
 
-	return result;
+	return old;
 }
 
 /*
+2 −2
@@ -89,13 +89,13 @@ static inline void atomic_set_mask(int mask, atomic_t *v)
 #define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-#define atomic_add_unless(v, a, u)				\
+#define __atomic_add_unless(v, a, u)				\
 ({								\
 	int c, old;						\
 	c = atomic_read(v);					\
 	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
 		c = old;					\
-	c != (u);						\
+	c;							\
 })
 
 /*
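
The macro form above relies on GCC statement expressions: the value of the whole ({ ... }) block is its last expression, so changing the final line from c != (u) to c is what switches this port's return convention. A tiny standalone illustration of that mechanism (hypothetical, not from the kernel tree):

	#include <stdio.h>

	/* The last expression in the braces is the macro's value. */
	#define last_value() ({ int x = 41; x + 1; })

	int main(void)
	{
		printf("%d\n", last_value());	/* prints 42 */
		return 0;
	}
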
+2 −2
@@ -138,7 +138,7 @@ static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
 
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
 
-static inline int atomic_add_unless(atomic_t *v, int a, int u)
+static inline int __atomic_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
 	unsigned long flags;
@@ -148,7 +148,7 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 	if (ret != u)
 		v->counter += a;
 	cris_atomic_restore(v, flags);
-	return ret != u;
+	return ret;
 }
 
 /* Atomic operations are already serializing */