arch/arm64/include/asm/spinlock.h  (+0 −1)

@@ -130,7 +130,6 @@ static inline int arch_spin_trylock(arch_spinlock_t *lock)
 	asm volatile(ARM64_LSE_ATOMIC_INSN(
 	/* LL/SC */
 "	prfm	pstl1strm, %2\n"
 "1:	ldaxr	%w0, %2\n"
 "	eor	%w1, %w0, %w0, ror #16\n"
 "	cbnz	%w1, 2f\n"
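For context on the sequence this hunk touches: arm64's pre-qspinlock spinlock is a ticket lock whose 32-bit word packs the owner ticket in the low 16 bits and the next free ticket in the high 16 bits, so "eor %w1, %w0, %w0, ror #16" yields zero exactly when the two halves match, i.e. the lock is free. Below is a minimal plain-C sketch of that trylock logic, not the kernel's code: the ticket_trylock name and ticket_lock_t typedef are made up for illustration, and a single strong compare-and-swap stands in for the ldaxr/stxr exclusive-access retry loop.

#include <stdint.h>

/* Hypothetical stand-in for arch_spinlock_t: owner ticket in bits 0-15,
 * next free ticket in bits 16-31, mirroring the arm64 ticket-lock layout. */
typedef uint32_t ticket_lock_t;

/* Sketch of the LL/SC trylock sequence. Returns 1 on success,
 * 0 if the lock is currently held. */
static int ticket_trylock(ticket_lock_t *lock)
{
	/* ldaxr: load-acquire the current lock word. */
	uint32_t old = __atomic_load_n(lock, __ATOMIC_ACQUIRE);

	/* eor %w1, %w0, %w0, ror #16: XOR the word with itself rotated
	 * by 16 bits; the result is zero iff owner == next. */
	if (old != ((old >> 16) | (old << 16)))
		return 0;	/* cbnz %w1, 2f: halves differ, lock held */

	/* add %w0, %w0, %3: claim the next ticket (high half). */
	uint32_t newval = old + (1u << 16);

	/* The kernel publishes newval with stxr and loops back to 1b if
	 * the exclusive store fails; one strong CAS models that here, so
	 * this sketch gives up on contention instead of retrying. */
	return __atomic_compare_exchange_n(lock, &old, newval,
					   0 /* strong */,
					   __ATOMIC_ACQUIRE,
					   __ATOMIC_RELAXED);
}

Compiled with GCC or Clang (the __atomic_* builtins are compiler intrinsics, no library needed), this reproduces the check the deleted-line hunk sits inside; the real code performs it in inline asm so the load, compare, and store stay within one exclusive-monitor window.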