
Commit 26760fc1 authored by Boqun Feng, committed by Michael Ellerman

powerpc: atomic: Implement acquire/release/relaxed variants for xchg
Implement xchg{,64}_relaxed and atomic{,64}_xchg_relaxed. Based on
these _relaxed variants, the release/acquire variants and fully ordered
versions can be built.

Note that xchg{,64}_relaxed and atomic{,64}_xchg_relaxed are not
compiler barriers.
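
For reference, once xchg_relaxed() exists, the acquire/release/fully
ordered forms can be generated by wrapping the relaxed primitive in
explicit barriers. A minimal sketch of that pattern, using the
__atomic_op_* wrapper names from include/linux/atomic.h and the generic
smp_mb__{before,after}_atomic() barriers (powerpc can override these
wrappers with its own, lighter barriers):

/* Sketch only: ordered xchg variants built on top of xchg_relaxed(). */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();	/* nothing later moves before the op */	\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic(); /* nothing earlier moves after the op */ \
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})

#define xchg_acquire(...)	__atomic_op_acquire(xchg, __VA_ARGS__)
#define xchg_release(...)	__atomic_op_release(xchg, __VA_ARGS__)
#define xchg(...)		__atomic_op_fence(xchg, __VA_ARGS__)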

Signed-off-by: Boqun Feng <boqun.feng@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent dc53617c
+2 −0
@@ -177,6 +177,7 @@ static __inline__ int atomic_dec_return_relaxed(atomic_t *v)
 
 #define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
  * __atomic_add_unless - add unless the number is a given value
@@ -444,6 +445,7 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 
 #define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
 #define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+#define atomic64_xchg_relaxed(v, new) xchg_relaxed(&((v)->counter), (new))
 
 /**
  * atomic64_add_unless - add unless the number is a given value
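
The new atomic{,64}_xchg_relaxed() helpers give callers a way to skip
the full barriers when no ordering is required. A hypothetical example
(not part of this patch; the counter and function names are made up):

/*
 * Read-and-reset a statistics counter.  The exchanged value does not
 * publish or consume any other memory, so the relaxed variant is
 * sufficient and avoids the entry/exit barriers of atomic_xchg().
 */
static atomic_t dropped_packets = ATOMIC_INIT(0);

static int read_and_reset_dropped(void)
{
	return atomic_xchg_relaxed(&dropped_packets, 0);
}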
+30 −39
@@ -9,21 +9,20 @@
 /*
  * Atomic exchange
  *
- * Changes the memory location '*ptr' to be val and returns
+ * Changes the memory location '*p' to be val and returns
  * the previous value stored there.
  */
+
 static __always_inline unsigned long
-__xchg_u32(volatile void *p, unsigned long val)
+__xchg_u32_local(volatile void *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
 "1:	lwarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stwcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -31,42 +30,34 @@ __xchg_u32(volatile void *p, unsigned long val)
 	return prev;
 }
 
-/*
- * Atomic exchange
- *
- * Changes the memory location '*ptr' to be val and returns
- * the previous value stored there.
- */
 static __always_inline unsigned long
-__xchg_u32_local(volatile void *p, unsigned long val)
+__xchg_u32_relaxed(u32 *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
 "1:	lwarx	%0,0,%2\n"
 	PPC405_ERR77(0, %2)
-"	stwcx.	%3,0,%2 \n\
-	bne-	1b"
-	: "=&r" (prev), "+m" (*(volatile unsigned int *)p)
+"	stwcx.	%3,0,%2\n"
+"	bne-	1b"
+	: "=&r" (prev), "+m" (*p)
 	: "r" (p), "r" (val)
-	: "cc", "memory");
+	: "cc");
 
 	return prev;
 }
 
 #ifdef CONFIG_PPC64
 static __always_inline unsigned long
-__xchg_u64(volatile void *p, unsigned long val)
+__xchg_u64_local(volatile void *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
-	PPC_ATOMIC_ENTRY_BARRIER
 "1:	ldarx	%0,0,%2 \n"
 	PPC405_ERR77(0,%2)
 "	stdcx.	%3,0,%2 \n\
 	bne-	1b"
-	PPC_ATOMIC_EXIT_BARRIER
 	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
 	: "r" (p), "r" (val)
 	: "cc", "memory");
@@ -75,18 +66,18 @@ __xchg_u64(volatile void *p, unsigned long val)
 }
 
 static __always_inline unsigned long
-__xchg_u64_local(volatile void *p, unsigned long val)
+__xchg_u64_relaxed(u64 *p, unsigned long val)
 {
 	unsigned long prev;
 
 	__asm__ __volatile__(
 "1:	ldarx	%0,0,%2\n"
 	PPC405_ERR77(0, %2)
-"	stdcx.	%3,0,%2 \n\
-	bne-	1b"
-	: "=&r" (prev), "+m" (*(volatile unsigned long *)p)
+"	stdcx.	%3,0,%2\n"
+"	bne-	1b"
+	: "=&r" (prev), "+m" (*p)
 	: "r" (p), "r" (val)
-	: "cc", "memory");
+	: "cc");
 
 	return prev;
 }
@@ -99,14 +90,14 @@ __xchg_u64_local(volatile void *p, unsigned long val)
 extern void __xchg_called_with_bad_pointer(void);
 
 static __always_inline unsigned long
-__xchg(volatile void *ptr, unsigned long x, unsigned int size)
+__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
 {
 	switch (size) {
 	case 4:
-		return __xchg_u32(ptr, x);
+		return __xchg_u32_local(ptr, x);
 #ifdef CONFIG_PPC64
 	case 8:
-		return __xchg_u64(ptr, x);
+		return __xchg_u64_local(ptr, x);
 #endif
 	}
 	__xchg_called_with_bad_pointer();
@@ -114,32 +105,32 @@ __xchg(volatile void *ptr, unsigned long x, unsigned int size)
 }
 
 static __always_inline unsigned long
-__xchg_local(volatile void *ptr, unsigned long x, unsigned int size)
+__xchg_relaxed(void *ptr, unsigned long x, unsigned int size)
 {
 	switch (size) {
 	case 4:
-		return __xchg_u32_local(ptr, x);
+		return __xchg_u32_relaxed(ptr, x);
 #ifdef CONFIG_PPC64
 	case 8:
-		return __xchg_u64_local(ptr, x);
+		return __xchg_u64_relaxed(ptr, x);
 #endif
 	}
 	__xchg_called_with_bad_pointer();
 	return x;
 }
-#define xchg(ptr,x)							     \
+#define xchg_local(ptr,x)						     \
   ({									     \
      __typeof__(*(ptr)) _x_ = (x);					     \
-     (__typeof__(*(ptr))) __xchg((ptr), (unsigned long)_x_, sizeof(*(ptr))); \
+     (__typeof__(*(ptr))) __xchg_local((ptr),				     \
+     		(unsigned long)_x_, sizeof(*(ptr)));			     \
   })
 
-#define xchg_local(ptr,x)						     \
-  ({									     \
-     __typeof__(*(ptr)) _x_ = (x);					     \
-     (__typeof__(*(ptr))) __xchg_local((ptr),				     \
-     		(unsigned long)_x_, sizeof(*(ptr)));			     \
-  })
-
+#define xchg_relaxed(ptr, x)						\
+({									\
+	__typeof__(*(ptr)) _x_ = (x);					\
+	(__typeof__(*(ptr))) __xchg_relaxed((ptr),			\
+			(unsigned long)_x_, sizeof(*(ptr)));		\
+})
 /*
  * Compare and exchange - if *p == old, set it to new,
  * and return the old value of *p.
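
As the commit message notes, the _relaxed variants are not compiler
barriers either: the asm above clobbers only "cc", not "memory". A
hypothetical illustration of why that matters (the function and
variable names are made up):

/*
 * With xchg_relaxed() the compiler (and the CPU) may reorder the plain
 * store to 'data' past the exchange, so a reader that sees flag == 1
 * cannot rely on seeing data == 42.  Use the fully ordered xchg(), or
 * an explicit barrier, when the exchange is meant to publish data.
 */
static unsigned int data;
static unsigned int flag;

static void publish_relaxed(void)	/* no ordering guarantees */
{
	data = 42;
	(void)xchg_relaxed(&flag, 1);
}

static void publish_ordered(void)	/* xchg() orders the store to data */
{
	data = 42;
	(void)xchg(&flag, 1);
}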