
Commit 46d075be authored by Nick Piggin, committed by Paul Mackerras

powerpc: Optimise smp_wmb



Change 2d1b2027 ("powerpc: Fixup
lwsync at runtime") removed __SUBARCH_HAS_LWSYNC, causing smp_wmb to
revert to eieio for all CPUs.  This restores the behaviour
introduced in 74f06095 ("powerpc:
Optimise smp_wmb on 64-bit processors").

Signed-off-by: Nick Piggin <npiggin@suse.de>
Signed-off-by: Paul Mackerras <paulus@samba.org>
parent a4e22f02
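
The net effect of the diff below is that, on CONFIG_SMP kernels, smp_wmb() once again emits lwsync (through the LWSYNC macro) when __powerpc64__ or CONFIG_PPC_E500MC is defined, and eieio otherwise. The following is a minimal, standalone C sketch of that selection, not the kernel header itself: it hardcodes the lwsync mnemonic where the real code uses the runtime-fixup LWSYNC macro, approximates stringify_in_c with a local helper, and the publish() function is an invented usage example. It only assembles for a powerpc target.

/*
 * Illustrative sketch only -- not the kernel header.  The macro names
 * mirror the diff, but lwsync is hardcoded where the kernel uses its
 * LWSYNC macro, and stringify_in_c is approximated locally.
 * Builds only for a powerpc target (the asm mnemonics are PowerPC).
 */
#define __stringify_in_c(x)	#x
#define stringify_in_c(x)	__stringify_in_c(x) " "

#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
#define __SUBARCH_HAS_LWSYNC
#endif

#ifdef __SUBARCH_HAS_LWSYNC
#define SMPWMB	lwsync	/* store-store ordering on 64-bit / e500mc */
#else
#define SMPWMB	eieio	/* fallback ordering instruction elsewhere */
#endif

/* Emit the selected barrier and stop the compiler reordering around it. */
#define smp_wmb() \
	__asm__ __volatile__ (stringify_in_c(SMPWMB) : : : "memory")

static int shared_data;
static int shared_flag;

/* Hypothetical usage: make the data store visible before the flag store. */
void publish(int value)
{
	shared_data = value;
	smp_wmb();
	shared_flag = 1;
}

If the commit titles are any guide, lwsync is the preferred (cheaper) write barrier on 64-bit processors, while eieio remains the default for the remaining sub-architectures.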
+4 −0
@@ -5,6 +5,10 @@
 #include <linux/stringify.h>
 #include <asm/feature-fixups.h>
 
+#if defined(__powerpc64__) || defined(CONFIG_PPC_E500MC)
+#define __SUBARCH_HAS_LWSYNC
+#endif
+
 #ifndef __ASSEMBLY__
 extern unsigned int __start___lwsync_fixup, __stop___lwsync_fixup;
 extern void do_lwsync_fixups(unsigned long value, void *fixup_start,
+2 −2
@@ -45,14 +45,14 @@
 #ifdef CONFIG_SMP
 
 #ifdef __SUBARCH_HAS_LWSYNC
-#    define SMPWMB      lwsync
+#    define SMPWMB      LWSYNC
 #else
 #    define SMPWMB      eieio
 #endif
 
 #define smp_mb()	mb()
 #define smp_rmb()	rmb()
-#define smp_wmb()	__asm__ __volatile__ (__stringify(SMPWMB) : : :"memory")
+#define smp_wmb()	__asm__ __volatile__ (stringify_in_c(SMPWMB) : : :"memory")
 #define smp_read_barrier_depends()	read_barrier_depends()
 #else
 #define smp_mb()	barrier()