Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit cb401347 authored by Linux Build Service Account's avatar Linux Build Service Account Committed by Gerrit - the friendly Code Review server
Browse files

Merge "ARM64: spinlock: Add SEV and dsb in unlock code"

parents 35b060cd 88a06607
Loading
Loading
Loading
Loading
+10 −0
Original line number Original line Diff line number Diff line
@@ -316,6 +316,16 @@ config ARM64_A57_ERRATA_832075
	  c) Following the branch instruction, there are six or more loads
	  c) Following the branch instruction, there are six or more loads
	  to device memory locations
	  to device memory locations


config ARM64_SEV_IN_LOCK_UNLOCK
	bool "Add explicit SEV in the spinlock unlock code path"
	# Enable by default on MSM8994, where the erratum was observed.
	# (A plain "default" line is the idiomatic form here; combining a
	# prompted "bool" with a separate "def_bool" re-declares the type.)
	default y if ARCH_MSM8994
	help
	  In certain unexplained cases, the stlr alone might not wake up
	  the processor waiting in WFE on a spinlock.
	  Add an explicit dsb and SEV in write_unlock, read_unlock
	  and spin_unlock to ensure that the core waiting on the lock
	  wakes up from WFE.

config SMP
config SMP
	bool "Symmetric Multi-Processing"
	bool "Symmetric Multi-Processing"
	help
	help
+12 −0
Original line number Original line Diff line number Diff line
@@ -86,6 +86,10 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
{
	asm volatile(
	asm volatile(
"	stlrh	%w1, %0\n"
"	stlrh	%w1, %0\n"
#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
"	dsb sy\n"
"	sev\n"
#endif
	: "=Q" (lock->owner)
	: "=Q" (lock->owner)
	: "r" (lock->owner + 1)
	: "r" (lock->owner + 1)
	: "memory");
	: "memory");
@@ -155,6 +159,10 @@ static inline void arch_write_unlock(arch_rwlock_t *rw)
{
{
	asm volatile(
	asm volatile(
	"	stlr	%w1, %0\n"
	"	stlr	%w1, %0\n"
#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
	"	dsb sy\n"
	"	sev\n"
#endif
	: "=Q" (rw->lock) : "r" (0) : "memory");
	: "=Q" (rw->lock) : "r" (0) : "memory");
}
}


@@ -198,6 +206,10 @@ static inline void arch_read_unlock(arch_rwlock_t *rw)
	"1:	ldxr	%w0, %2\n"
	"1:	ldxr	%w0, %2\n"
	"	sub	%w0, %w0, #1\n"
	"	sub	%w0, %w0, #1\n"
	"	stlxr	%w1, %w0, %2\n"
	"	stlxr	%w1, %w0, %2\n"
#ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK
	"	dsb sy\n"
	"	sev\n"
#endif
	"	cbnz	%w1, 1b\n"
	"	cbnz	%w1, 1b\n"
	: "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock)
	: "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock)
	:
	: