Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit d7aab291 authored by Trilok Soni's avatar Trilok Soni
Browse files

Revert "ARM64: Insert barriers before Store-Release operations"



This workaround is no longer needed for latest 8996 revisions
so revert 49c07968 ("ARM64: Insert barriers before Store-Release operations")
now.

CRs-Fixed: 960849
Change-Id: I40c5ec9af60e7d5b3355dd3b80f6e9854d16e4c0
Signed-off-by: Trilok Soni <tsoni@codeaurora.org>
parent 0df0dc1c
Loading
Loading
Loading
Loading
+0 −11
Original line number Diff line number Diff line
@@ -529,17 +529,6 @@ config CPU_BIG_ENDIAN
       help
         Say Y if you plan on running a kernel in big-endian mode.

config ARM64_STLR_NEEDS_BARRIER
	bool "Store-Release operations require explicit barriers"
	def_bool ARCH_MSM8996
	help
	  Some early samples of MSMTHULIUM SoCs require that an explicit barrier
	  be executed prior to any Store-Release operation (STLR) to conform to
	  ARM memory ordering requirements. If you are building the kernel to
	  work on one of these early designs, select 'Y' here.

	  For production kernels, you should say 'N' here.

config SMP
	bool "Symmetric Multi-Processing"
	help
+0 −19
Original line number Diff line number Diff line
@@ -58,24 +58,6 @@ do { \
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)

#ifdef CONFIG_ARM64_STLR_NEEDS_BARRIER
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 4:								\
		asm volatile ("dmb nsh\n"				\
			      "stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("dmb nsh\n"				\
			      "stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)
#else
#define smp_store_release(p, v)						\
do {									\
	compiletime_assert_atomic_type(*p);				\
@@ -90,7 +72,6 @@ do { \
		break;							\
	}								\
} while (0)
#endif

#define smp_load_acquire(p)						\
({									\
+0 −6
Original line number Diff line number Diff line
@@ -85,9 +85,6 @@ static inline int arch_spin_trylock(arch_spinlock_t *lock)
static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	asm volatile(
#ifdef CONFIG_ARM64_STLR_NEEDS_BARRIER
"	dmb nsh\n"
#endif
"	stlrh	%w1, %0\n"
	: "=Q" (lock->owner)
	: "r" (lock->owner + 1)
@@ -156,9 +153,6 @@ static inline int arch_write_trylock(arch_rwlock_t *rw)
/*
 * arch_write_unlock - release a write-held rwlock.
 *
 * Writes 0 to rw->lock using STLR (store-release), which orders all
 * memory accesses made inside the critical section before the unlock
 * store becomes visible, per the ARMv8 release semantics.  The "memory"
 * clobber stops the compiler from reordering accesses across the asm.
 */
static inline void arch_write_unlock(arch_rwlock_t *rw)
{
	asm volatile(
#ifdef CONFIG_ARM64_STLR_NEEDS_BARRIER
	/*
	 * Errata workaround: early MSM8996 silicon requires an explicit
	 * barrier before any STLR (see ARM64_STLR_NEEDS_BARRIER help
	 * text).  Not needed on production parts.
	 */
"	dmb nsh\n"
#endif
	"	stlr	%w1, %0\n"
	: "=Q" (rw->lock) : "r" (0) : "memory");
}