
Commit febdbfe8 authored by Peter Zijlstra, committed by Ingo Molnar

arch: Prepare for smp_mb__{before,after}_atomic()



Since the smp_mb__{before,after}*() ops are fundamentally dependent on
how an arch implements its atomics, it doesn't make sense to have 3
variants of them: they must all be the same.

Furthermore, the 3 variants suggest they're only valid for those 3
atomic ops, while we have many more where they could be applied.

So move away from
smp_mb__{before,after}_{atomic,clear}_{dec,inc,bit}() and reduce the
interface to just the two: smp_mb__{before,after}_atomic().
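
For illustration, a typical caller of the consolidated pair would look
like this (a sketch, not part of this patch; the flag bit, `flags` bitmap
and wait queue are hypothetical):

	set_bit(MY_FLAG_PENDING, &flags);
	smp_mb__after_atomic();	/* order set_bit() against the waitqueue check */
	if (waitqueue_active(&wq))
		wake_up(&wq);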

This patch prepares the way by introducing implementations in
asm-generic/barrier.h that default to a full barrier, and by providing
__deprecated inlines for the previous 6 barriers wherever the arch does
not provide them.
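
For example (a sketch, not part of this patch), an arch whose atomic RMW
ops already imply full ordering could later override the asm-generic
defaults with a plain compiler barrier in its asm/barrier.h:

	#define smp_mb__before_atomic()	barrier()
	#define smp_mb__after_atomic()	barrier()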

This should allow for a mostly painless transition (lots of deprecation
warnings in the interim).

Signed-off-by: Peter Zijlstra <peterz@infradead.org>
Acked-by: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Link: http://lkml.kernel.org/n/tip-wr59327qdyi9mbzn6x937s4e@git.kernel.org


Cc: Arnd Bergmann <arnd@arndb.de>
Cc: "Chen, Gong" <gong.chen@linux.intel.com>
Cc: John Sullivan <jsrhbz@kanargh.force9.co.uk>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Mauro Carvalho Chehab <m.chehab@samsung.com>
Cc: Srinivas Pandruvada <srinivas.pandruvada@linux.intel.com>
Cc: "Theodore Ts'o" <tytso@mit.edu>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 2ab08ee9

include/asm-generic/atomic.h  +1 −6
@@ -16,6 +16,7 @@
 #define __ASM_GENERIC_ATOMIC_H
 
 #include <asm/cmpxchg.h>
+#include <asm/barrier.h>
 
 #ifdef CONFIG_SMP
 /* Force people to define core atomics */
@@ -182,11 +183,5 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 }
 #endif
 
-/* Assume that atomic operations are already serializing */
-#define smp_mb__before_atomic_dec()	barrier()
-#define smp_mb__after_atomic_dec()	barrier()
-#define smp_mb__before_atomic_inc()	barrier()
-#define smp_mb__after_atomic_inc()	barrier()
-
 #endif /* __KERNEL__ */
 #endif /* __ASM_GENERIC_ATOMIC_H */

include/asm-generic/barrier.h  +8 −0
@@ -62,6 +62,14 @@
 #define set_mb(var, value)  do { (var) = (value); mb(); } while (0)
 #endif
 
+#ifndef smp_mb__before_atomic
+#define smp_mb__before_atomic()	smp_mb()
+#endif
+
+#ifndef smp_mb__after_atomic
+#define smp_mb__after_atomic()	smp_mb()
+#endif
+
 #define smp_store_release(p, v)						\
 do {									\
 	compiletime_assert_atomic_type(*p);				\

include/asm-generic/bitops.h  +1 −8
@@ -11,14 +11,7 @@
 
 #include <linux/irqflags.h>
 #include <linux/compiler.h>
-
-/*
- * clear_bit may not imply a memory barrier
- */
-#ifndef smp_mb__before_clear_bit
-#define smp_mb__before_clear_bit()	smp_mb()
-#define smp_mb__after_clear_bit()	smp_mb()
-#endif
+#include <asm/barrier.h>
 
 #include <asm-generic/bitops/__ffs.h>
 #include <asm-generic/bitops/ffz.h>
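
As context for the removal above (a sketch, not from this patch; `word`
is a hypothetical bitmap), the classic unlock-style pairing that used to
rely on smp_mb__before_clear_bit() would now be written as:

	/* Make prior stores visible before releasing the bit "lock". */
	smp_mb__before_atomic();
	clear_bit(0, &word);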

include/linux/atomic.h  +36 −0
@@ -3,6 +3,42 @@
 #define _LINUX_ATOMIC_H
 #include <asm/atomic.h>
 
+/*
+ * Provide __deprecated wrappers for the new interface, avoid flag day changes.
+ * We need the ugly external functions to break header recursion hell.
+ */
+#ifndef smp_mb__before_atomic_inc
+static inline void __deprecated smp_mb__before_atomic_inc(void)
+{
+	extern void __smp_mb__before_atomic(void);
+	__smp_mb__before_atomic();
+}
+#endif
+
+#ifndef smp_mb__after_atomic_inc
+static inline void __deprecated smp_mb__after_atomic_inc(void)
+{
+	extern void __smp_mb__after_atomic(void);
+	__smp_mb__after_atomic();
+}
+#endif
+
+#ifndef smp_mb__before_atomic_dec
+static inline void __deprecated smp_mb__before_atomic_dec(void)
+{
+	extern void __smp_mb__before_atomic(void);
+	__smp_mb__before_atomic();
+}
+#endif
+
+#ifndef smp_mb__after_atomic_dec
+static inline void __deprecated smp_mb__after_atomic_dec(void)
+{
+	extern void __smp_mb__after_atomic(void);
+	__smp_mb__after_atomic();
+}
+#endif
+
 /**
  * atomic_add_unless - add unless the number is already a given value
  * @v: pointer of type atomic_t
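
Note that the wrappers above only declare __smp_mb__before_atomic() and
__smp_mb__after_atomic(); their out-of-line definitions must live in
common code that can see the full barrier headers, and are not part of
this diff. A minimal sketch of such definitions, assuming a shared .c
file:

	#include <linux/atomic.h>
	#include <linux/export.h>

	/* Out-of-line bodies so headers need not include each other. */
	void __smp_mb__before_atomic(void)
	{
		smp_mb__before_atomic();
	}
	EXPORT_SYMBOL(__smp_mb__before_atomic);

	void __smp_mb__after_atomic(void)
	{
		smp_mb__after_atomic();
	}
	EXPORT_SYMBOL(__smp_mb__after_atomic);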

include/linux/bitops.h  +20 −0
@@ -32,6 +32,26 @@ extern unsigned long __sw_hweight64(__u64 w);
  */
 #include <asm/bitops.h>
 
+/*
+ * Provide __deprecated wrappers for the new interface, avoid flag day changes.
+ * We need the ugly external functions to break header recursion hell.
+ */
+#ifndef smp_mb__before_clear_bit
+static inline void __deprecated smp_mb__before_clear_bit(void)
+{
+	extern void __smp_mb__before_atomic(void);
+	__smp_mb__before_atomic();
+}
+#endif
+
+#ifndef smp_mb__after_clear_bit
+static inline void __deprecated smp_mb__after_clear_bit(void)
+{
+	extern void __smp_mb__after_atomic(void);
+	__smp_mb__after_atomic();
+}
+#endif
+
 #define for_each_set_bit(bit, addr, size) \
 	for ((bit) = find_first_bit((addr), (size));		\
 	     (bit) < (size);					\
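
With these wrappers in place, old call sites keep compiling but attract
deprecation warnings until they are converted (illustrative; `word` is a
hypothetical bitmap):

	clear_bit(0, &word);
	smp_mb__after_clear_bit();	/* triggers a -Wdeprecated-declarations warning */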