
Commit 3c12cecc authored by Linux Build Service Account, committed by Gerrit - the friendly Code Review server

Merge "arm64: percpu: Make this_cpu accessors pre-empt safe"

parents 339f9353 344808e8
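
The merged change makes the this_cpu family of per-CPU accessors safe on preemptible kernels: previously a task could be migrated between computing raw_cpu_ptr() and performing the read-modify-write, so the update could land on another CPU's copy of the variable. A minimal sketch of the effect, assuming a hypothetical counter my_stat that is not part of this commit:

#include <linux/percpu.h>

/* Hypothetical per-CPU counter, for illustration only. */
static DEFINE_PER_CPU(unsigned long, my_stat);

static void stat_inc(void)
{
	/*
	 * After this merge, this_cpu_add() expands to
	 * _pcp_protect(__percpu_add, ...): preemption is disabled around
	 * the ldxr/stxr loop, so the task cannot be migrated between
	 * raw_cpu_ptr() and the store.
	 */
	this_cpu_add(my_stat, 1);
}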
arch/arm64/Kconfig +2 −0
@@ -35,6 +35,7 @@ config ARM64
	select GENERIC_TIME_VSYSCALL
	select HANDLE_DOMAIN_IRQ
	select HARDIRQS_SW_RESEND
	select HAVE_ALIGNED_STRUCT_PAGE if SLUB
	select HAVE_ARCH_AUDITSYSCALL
	select HAVE_ARCH_JUMP_LABEL
	select HAVE_ARCH_KGDB
@@ -43,6 +44,7 @@ config ARM64
	select HAVE_BPF_JIT
	select HAVE_C_RECORDMCOUNT
	select HAVE_CC_STACKPROTECTOR
	select HAVE_CMPXCHG_DOUBLE
	select HAVE_DEBUG_BUGVERBOSE
	select HAVE_DEBUG_KMEMLEAK
	select HAVE_DMA_API_DEBUG
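
The two new Kconfig selections advertise the double-word compare-and-exchange added below: HAVE_CMPXCHG_DOUBLE lets generic code use cmpxchg_double(), and HAVE_ALIGNED_STRUCT_PAGE gives SLUB the struct page alignment the exclusive-pair instructions need. A hedged sketch of how a caller might use the feature; struct pair and try_update() are hypothetical, not from this commit:

#include <linux/atomic.h>
#include <linux/types.h>

/*
 * Hypothetical two-word descriptor, for illustration only. The words
 * must be adjacent and the pair naturally (16-byte) aligned, since
 * ldxp/stxp require pair-sized alignment and __cmpxchg_double()
 * checks adjacency with VM_BUG_ON().
 */
struct pair {
	unsigned long a;
	unsigned long b;
} __aligned(2 * sizeof(unsigned long));

static bool try_update(struct pair *p,
		       unsigned long old_a, unsigned long old_b,
		       unsigned long new_a, unsigned long new_b)
{
	if (!system_has_cmpxchg_double())
		return false;	/* caller would fall back to a lock */

	/* Replaces both words atomically iff both still match. */
	return cmpxchg_double(&p->a, &p->b, old_a, old_b, new_a, new_b);
}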
arch/arm64/include/asm/cmpxchg.h +89 −0
@@ -19,6 +19,7 @@
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/mmdebug.h>

#include <asm/barrier.h>

@@ -152,6 +153,51 @@ static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
	return oldval;
}

#define system_has_cmpxchg_double()     1

static inline int __cmpxchg_double(volatile void *ptr1, volatile void *ptr2,
		unsigned long old1, unsigned long old2,
		unsigned long new1, unsigned long new2, int size)
{
	unsigned long loop, lost;

	switch (size) {
	case 8:
		VM_BUG_ON((unsigned long *)ptr2 - (unsigned long *)ptr1 != 1);
		do {
			asm volatile("// __cmpxchg_double8\n"
			"	ldxp	%0, %1, %2\n"
			"	eor	%0, %0, %3\n"
			"	eor	%1, %1, %4\n"
			"	orr	%1, %0, %1\n"
			"	mov	%w0, #0\n"
			"	cbnz	%1, 1f\n"
			"	stxp	%w0, %5, %6, %2\n"
			"1:\n"
				: "=&r"(loop), "=&r"(lost), "+Q" (*(u64 *)ptr1)
				: "r" (old1), "r"(old2), "r"(new1), "r"(new2));
		} while (loop);
		break;
	default:
		BUILD_BUG();
	}

	return !lost;
}

static inline int __cmpxchg_double_mb(volatile void *ptr1, volatile void *ptr2,
			unsigned long old1, unsigned long old2,
			unsigned long new1, unsigned long new2, int size)
{
	int ret;

	smp_mb();
	ret = __cmpxchg_double(ptr1, ptr2, old1, old2, new1, new2, size);
	smp_mb();

	return ret;
}

static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
					 unsigned long new, int size)
{
@@ -182,6 +228,49 @@ static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
	__ret; \
})

#define cmpxchg_double(ptr1, ptr2, o1, o2, n1, n2) \
({\
	int __ret;\
	__ret = __cmpxchg_double_mb((ptr1), (ptr2), (unsigned long)(o1), \
			(unsigned long)(o2), (unsigned long)(n1), \
			(unsigned long)(n2), sizeof(*(ptr1)));\
	__ret; \
})

#define cmpxchg_double_local(ptr1, ptr2, o1, o2, n1, n2) \
({\
	int __ret;\
	__ret = __cmpxchg_double((ptr1), (ptr2), (unsigned long)(o1), \
			(unsigned long)(o2), (unsigned long)(n1), \
			(unsigned long)(n2), sizeof(*(ptr1)));\
	__ret; \
})

#define _protect_cmpxchg_local(pcp, o, n)			\
({								\
	typeof(*raw_cpu_ptr(&(pcp))) __ret;			\
	preempt_disable();					\
	__ret = cmpxchg_local(raw_cpu_ptr(&(pcp)), o, n);	\
	preempt_enable();					\
	__ret;							\
})

#define this_cpu_cmpxchg_1(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_2(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_4(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)
#define this_cpu_cmpxchg_8(ptr, o, n) _protect_cmpxchg_local(ptr, o, n)

#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2)		\
({									\
	int __ret;							\
	preempt_disable();						\
	__ret = cmpxchg_double_local(	raw_cpu_ptr(&(ptr1)),		\
					raw_cpu_ptr(&(ptr2)),		\
					o1, o2, n1, n2);		\
	preempt_enable();						\
	__ret;								\
})

#define cmpxchg64(ptr,o,n)		cmpxchg((ptr),(o),(n))
#define cmpxchg64_local(ptr,o,n)	cmpxchg_local((ptr),(o),(n))
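
The _protect_cmpxchg_local() wrapper above turns cmpxchg_local() into a preempt-safe this_cpu_cmpxchg(): the per-CPU pointer is resolved and the compare-and-swap executed with preemption disabled. A minimal usage sketch, with a hypothetical per-CPU state word:

#include <linux/percpu.h>
#include <linux/types.h>

/* Hypothetical per-CPU state word, for illustration only. */
static DEFINE_PER_CPU(unsigned long, my_state);

#define MY_IDLE	0UL
#define MY_BUSY	1UL

static bool claim_this_cpu(void)
{
	/*
	 * Resolves raw_cpu_ptr() and runs cmpxchg_local() under
	 * preempt_disable(), so the compare and the store both hit
	 * the current CPU's copy.
	 */
	return this_cpu_cmpxchg(my_state, MY_IDLE, MY_BUSY) == MY_IDLE;
}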

arch/arm64/include/asm/percpu.h +237 −0
@@ -44,6 +44,243 @@ static inline unsigned long __my_cpu_offset(void)

#endif /* CONFIG_SMP */

#define PERCPU_OP(op, asm_op)						\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long loop, ret;					\
									\
	switch (size) {							\
	case 1:								\
		do {							\
			asm ("//__per_cpu_" #op "_1\n"			\
			"ldxrb	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxrb	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u8 *)ptr)			\
			: [val] "Ir" (val));				\
		} while (loop);						\
		break;							\
	case 2:								\
		do {							\
			asm ("//__per_cpu_" #op "_2\n"			\
			"ldxrh	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxrh	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr]  "+Q"(*(u16 *)ptr)			\
			: [val] "Ir" (val));				\
		} while (loop);						\
		break;							\
	case 4:								\
		do {							\
			asm ("//__per_cpu_" #op "_4\n"			\
			"ldxr	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxr	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u32 *)ptr)			\
			: [val] "Ir" (val));				\
		} while (loop);						\
		break;							\
	case 8:								\
		do {							\
			asm ("//__per_cpu_" #op "_8\n"			\
			"ldxr	  %[ret], %[ptr]\n"			\
			#asm_op " %[ret], %[ret], %[val]\n"		\
			"stxr	  %w[loop], %[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u64 *)ptr)			\
			: [val] "Ir" (val));				\
		} while (loop);						\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
									\
	return ret;							\
}

PERCPU_OP(add, add)
PERCPU_OP(and, and)
PERCPU_OP(or, orr)
#undef PERCPU_OP

static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		ret = ACCESS_ONCE(*(u8 *)ptr);
		break;
	case 2:
		ret = ACCESS_ONCE(*(u16 *)ptr);
		break;
	case 4:
		ret = ACCESS_ONCE(*(u32 *)ptr);
		break;
	case 8:
		ret = ACCESS_ONCE(*(u64 *)ptr);
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		ACCESS_ONCE(*(u8 *)ptr) = (u8)val;
		break;
	case 2:
		ACCESS_ONCE(*(u16 *)ptr) = (u16)val;
		break;
	case 4:
		ACCESS_ONCE(*(u32 *)ptr) = (u32)val;
		break;
	case 8:
		ACCESS_ONCE(*(u64 *)ptr) = (u64)val;
		break;
	default:
		BUILD_BUG();
	}
}

static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	unsigned long ret, loop;

	switch (size) {
	case 1:
		do {
			asm ("//__percpu_xchg_1\n"
			"ldxrb %w[ret], %[ptr]\n"
			"stxrb %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u8 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 2:
		do {
			asm ("//__percpu_xchg_2\n"
			"ldxrh %w[ret], %[ptr]\n"
			"stxrh %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u16 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 4:
		do {
			asm ("//__percpu_xchg_4\n"
			"ldxr %w[ret], %[ptr]\n"
			"stxr %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u32 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 8:
		do {
			asm ("//__percpu_xchg_8\n"
			"ldxr %[ret], %[ptr]\n"
			"stxr %w[loop], %[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u64 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}

#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	preempt_disable();						\
	__retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)), 	\
					      sizeof(pcp));		\
	preempt_enable();						\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	preempt_disable();						\
	__percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val), 	\
				sizeof(pcp));				\
	preempt_enable();						\
} while(0)								\

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable();					\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable();					\
	__retval;						\
})

#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))

#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */
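
Taken together, PERCPU_OP() generates exclusive-load/store (ldxr/stxr) read-modify-write loops, and _pcp_protect(), _percpu_read(), and _percpu_write() bracket them with preempt_disable()/preempt_enable() so an operation cannot be migrated mid-sequence. A usage sketch of the resulting accessors, with hypothetical variables not from this commit:

#include <linux/percpu.h>

/* Hypothetical per-CPU statistics, for illustration only. */
static DEFINE_PER_CPU(u64, my_bytes);
static DEFINE_PER_CPU(unsigned long, my_flags);

static void account(unsigned int len)
{
	/* this_cpu_add_8() -> _pcp_protect(__percpu_add, ...) */
	this_cpu_add(my_bytes, len);

	/* this_cpu_or_8() -> __percpu_or(), the orr-based PERCPU_OP loop */
	this_cpu_or(my_flags, 1UL);
}

static u64 snapshot(void)
{
	/*
	 * this_cpu_read_8() -> _percpu_read(): an ACCESS_ONCE() load with
	 * preemption disabled around the pointer computation and the load.
	 */
	return this_cpu_read(my_bytes);
}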