Loading arch/arm64/Kconfig +9 −0 Original line number Diff line number Diff line Loading @@ -529,6 +529,15 @@ config CPU_BIG_ENDIAN help Say Y if you plan on running a kernel in big-endian mode. config ARM64_SEV_IN_LOCK_UNLOCK bool "Add explicit SEV in the spinlock unlock code path" help In certain unexplained cases, the stlr alone might not wake up the processor waiting in WFE on a spinlock. Add an explicit DSB and SEV in write_unlock, read_unlock and spin_unlock to ensure that the core waiting on the lock wakes up from WFE. config SMP bool "Symmetric Multi-Processing" help Loading arch/arm64/include/asm/spinlock.h +12 −0 Original line number Diff line number Diff line Loading @@ -86,6 +86,10 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock) { asm volatile( " stlrh %w1, %0\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif : "=Q" (lock->owner) : "r" (lock->owner + 1) : "memory"); Loading Loading @@ -154,6 +158,10 @@ static inline void arch_write_unlock(arch_rwlock_t *rw) { asm volatile( " stlr %w1, %0\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif : "=Q" (rw->lock) : "r" (0) : "memory"); } Loading Loading @@ -197,6 +205,10 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) "1: ldxr %w0, %2\n" " sub %w0, %w0, #1\n" " stlxr %w1, %w0, %2\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif " cbnz %w1, 1b\n" : "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock) : Loading Loading
arch/arm64/Kconfig +9 −0 Original line number Diff line number Diff line Loading @@ -529,6 +529,15 @@ config CPU_BIG_ENDIAN help Say Y if you plan on running a kernel in big-endian mode. config ARM64_SEV_IN_LOCK_UNLOCK bool "Add explicit SEV in the spinlock unlock code path" help In certain unexplained cases, the stlr alone might not wake up the processor waiting in WFE on a spinlock. Add an explicit DSB and SEV in write_unlock, read_unlock and spin_unlock to ensure that the core waiting on the lock wakes up from WFE. config SMP bool "Symmetric Multi-Processing" help Loading
arch/arm64/include/asm/spinlock.h +12 −0 Original line number Diff line number Diff line Loading @@ -86,6 +86,10 @@ static inline void arch_spin_unlock(arch_spinlock_t *lock) { asm volatile( " stlrh %w1, %0\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif : "=Q" (lock->owner) : "r" (lock->owner + 1) : "memory"); Loading Loading @@ -154,6 +158,10 @@ static inline void arch_write_unlock(arch_rwlock_t *rw) { asm volatile( " stlr %w1, %0\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif : "=Q" (rw->lock) : "r" (0) : "memory"); } Loading Loading @@ -197,6 +205,10 @@ static inline void arch_read_unlock(arch_rwlock_t *rw) "1: ldxr %w0, %2\n" " sub %w0, %w0, #1\n" " stlxr %w1, %w0, %2\n" #ifdef CONFIG_ARM64_SEV_IN_LOCK_UNLOCK " dsb ishst\n" " sev\n" #endif " cbnz %w1, 1b\n" : "=&r" (tmp), "=&r" (tmp2), "+Q" (rw->lock) : Loading