Commit 1638fb72 authored by Michael S. Tsirkin

x86: define __smp_xxx

This defines the __smp_xxx barriers for x86, for use by virtualization.

The smp_xxx barriers are removed, as they are now defined correctly by
asm-generic/barrier.h.
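
For context, the generic header that this series reworks derives the public
smp_xxx names from the per-architecture __smp_xxx hooks, so each architecture
supplies the SMP-strength primitive once and the !SMP fallback comes for free.
A condensed paraphrase of that pattern (not the full asm-generic/barrier.h)
might look like this:

/* Condensed paraphrase of include/asm-generic/barrier.h, not the full file. */
#ifdef CONFIG_SMP

#ifndef smp_mb
#define smp_mb()	__smp_mb()	/* SMP build: use the arch primitive */
#endif

#ifndef smp_rmb
#define smp_rmb()	__smp_rmb()
#endif

#else	/* !CONFIG_SMP */

#ifndef smp_mb
#define smp_mb()	barrier()	/* UP build: a compiler barrier suffices */
#endif

#ifndef smp_rmb
#define smp_rmb()	barrier()
#endif

#endif	/* CONFIG_SMP */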

Signed-off-by: Michael S. Tsirkin <mst@redhat.com>
Acked-by: Arnd Bergmann <arnd@arndb.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
parent 1ce79091
arch/x86/include/asm/barrier.h  +12 −19

@@ -31,17 +31,10 @@
 #endif
 #define dma_wmb()	barrier()
 
-#ifdef CONFIG_SMP
-#define smp_mb()	mb()
-#define smp_rmb()	dma_rmb()
-#define smp_wmb()	barrier()
-#define smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
-#else /* !SMP */
-#define smp_mb()	barrier()
-#define smp_rmb()	barrier()
-#define smp_wmb()	barrier()
-#define smp_store_mb(var, value) do { WRITE_ONCE(var, value); barrier(); } while (0)
-#endif /* SMP */
+#define __smp_mb()	mb()
+#define __smp_rmb()	dma_rmb()
+#define __smp_wmb()	barrier()
+#define __smp_store_mb(var, value) do { (void)xchg(&var, value); } while (0)
 
 #if defined(CONFIG_X86_PPRO_FENCE)
 
@@ -50,31 +43,31 @@
  * model and we should fall back to full barriers.
  */
 
-#define smp_store_release(p, v)						\
+#define __smp_store_release(p, v)					\
 do {									\
 	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
+	__smp_mb();							\
 	WRITE_ONCE(*p, v);						\
 } while (0)
 
-#define smp_load_acquire(p)						\
+#define __smp_load_acquire(p)						\
 ({									\
 	typeof(*p) ___p1 = READ_ONCE(*p);				\
 	compiletime_assert_atomic_type(*p);				\
-	smp_mb();							\
+	__smp_mb();							\
 	___p1;								\
 })
 
 #else /* regular x86 TSO memory ordering */
 
-#define smp_store_release(p, v)						\
+#define __smp_store_release(p, v)					\
 do {									\
 	compiletime_assert_atomic_type(*p);				\
 	barrier();							\
 	WRITE_ONCE(*p, v);						\
 } while (0)
 
-#define smp_load_acquire(p)						\
+#define __smp_load_acquire(p)						\
 ({									\
 	typeof(*p) ___p1 = READ_ONCE(*p);				\
 	compiletime_assert_atomic_type(*p);				\
@@ -85,8 +78,8 @@ do { \
 #endif
 
 /* Atomic operations are already serializing on x86 */
-#define smp_mb__before_atomic()	barrier()
-#define smp_mb__after_atomic()	barrier()
+#define __smp_mb__before_atomic()	barrier()
+#define __smp_mb__after_atomic()	barrier()
 
 #include <asm-generic/barrier.h>
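
The "for use by virtualization" above refers to the virt_mb() family that
later patches in this series add to asm-generic/barrier.h: those wrappers
always expand to the __smp_xxx primitives, so a guest driver synchronizing
with a possibly-SMP host keeps real ordering even in a CONFIG_SMP=n guest
kernel. A sketch of the idea follows; the demo_ring structure and function
are hypothetical, only the virt_* mapping comes from the series:

/* Paraphrased mapping from the follow-up asm-generic patch in this series. */
#define virt_mb()	__smp_mb()
#define virt_rmb()	__smp_rmb()
#define virt_wmb()	__smp_wmb()

/* Hypothetical guest-side use: fill descriptors, then expose the new index. */
struct demo_ring {
	u16 avail_idx;		/* index the host polls */
	/* ... descriptors ... */
};

static void demo_ring_publish(struct demo_ring *r, u16 idx)
{
	virt_wmb();			/* descriptor writes visible before the index */
	WRITE_ONCE(r->avail_idx, idx);	/* host may read this concurrently */
}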