Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 2929ad29 authored by Alexei Starovoitov
Browse files

Merge branch 'improve_perf_barriers'



Daniel Borkmann says:

====================
This set first adds smp_* barrier variants to tools infrastructure
and updates perf and libbpf to make use of them. For details, please
see individual patches, thanks!

Arnaldo, if there are no objections, could this be routed via bpf-next
with Acked-by's due to later dependencies in libbpf? Alternatively,
I could also get the 2nd patch out during merge window, but perhaps
it's okay to do in one go as there shouldn't be much conflict in perf
itself.

Thanks!

v1 -> v2:
  - add common helper and switch to acquire/release variants
    when possible, thanks Peter!
====================

Signed-off-by: Alexei Starovoitov <ast@kernel.org>
parents 78de3546 a64af0ef
Loading
Loading
Loading
Loading
+70 −0
Original line number Diff line number Diff line
@@ -14,4 +14,74 @@
#define wmb()		asm volatile("dmb ishst" ::: "memory")
#define rmb()		asm volatile("dmb ishld" ::: "memory")

#define smp_store_release(p, v)					\
do {								\
	union { typeof(*p) __val; char __c[1]; } __u =		\
		{ .__val = (__force typeof(*p)) (v) }; 		\
								\
	switch (sizeof(*p)) {					\
	case 1:							\
		asm volatile ("stlrb %w1, %0"			\
				: "=Q" (*p)			\
				: "r" (*(__u8 *)__u.__c)	\
				: "memory");			\
		break;						\
	case 2:							\
		asm volatile ("stlrh %w1, %0"			\
				: "=Q" (*p)			\
				: "r" (*(__u16 *)__u.__c)	\
				: "memory");			\
		break;						\
	case 4:							\
		asm volatile ("stlr %w1, %0"			\
				: "=Q" (*p)			\
				: "r" (*(__u32 *)__u.__c)	\
				: "memory");			\
		break;						\
	case 8:							\
		asm volatile ("stlr %1, %0"			\
				: "=Q" (*p)			\
				: "r" (*(__u64 *)__u.__c)	\
				: "memory");			\
		break;						\
	default:						\
		/* Only to shut up gcc ... */			\
		mb();						\
		break;						\
	}							\
} while (0)

#define smp_load_acquire(p)					\
({								\
	union { typeof(*p) __val; char __c[1]; } __u;		\
								\
	switch (sizeof(*p)) {					\
	case 1:							\
		asm volatile ("ldarb %w0, %1"			\
			: "=r" (*(__u8 *)__u.__c)		\
			: "Q" (*p) : "memory");			\
		break;						\
	case 2:							\
		asm volatile ("ldarh %w0, %1"			\
			: "=r" (*(__u16 *)__u.__c)		\
			: "Q" (*p) : "memory");			\
		break;						\
	case 4:							\
		asm volatile ("ldar %w0, %1"			\
			: "=r" (*(__u32 *)__u.__c)		\
			: "Q" (*p) : "memory");			\
		break;						\
	case 8:							\
		asm volatile ("ldar %0, %1"			\
			: "=r" (*(__u64 *)__u.__c)		\
			: "Q" (*p) : "memory");			\
		break;						\
	default:						\
		/* Only to shut up gcc ... */			\
		mb();						\
		break;						\
	}							\
	__u.__val;						\
})

#endif /* _TOOLS_LINUX_ASM_AARCH64_BARRIER_H */
+13 −0
Original line number Diff line number Diff line
@@ -46,4 +46,17 @@
#define rmb()		mb()
#define wmb()		mb()

#define smp_store_release(p, v)			\
do {						\
	barrier();				\
	WRITE_ONCE(*p, v);			\
} while (0)

#define smp_load_acquire(p)			\
({						\
	typeof(*p) ___p1 = READ_ONCE(*p);	\
	barrier();				\
	___p1;					\
})

#endif /* _TOOLS_LINUX_ASM_IA64_BARRIER_H */
+16 −0
Original line number Diff line number Diff line
@@ -27,4 +27,20 @@
#define rmb()  __asm__ __volatile__ ("sync" : : : "memory")
#define wmb()  __asm__ __volatile__ ("sync" : : : "memory")

#if defined(__powerpc64__)
#define smp_lwsync()	__asm__ __volatile__ ("lwsync" : : : "memory")

#define smp_store_release(p, v)			\
do {						\
	smp_lwsync();				\
	WRITE_ONCE(*p, v);			\
} while (0)

#define smp_load_acquire(p)			\
({						\
	typeof(*p) ___p1 = READ_ONCE(*p);	\
	smp_lwsync();				\
	___p1;					\
})
#endif /* defined(__powerpc64__) */
#endif /* _TOOLS_LINUX_ASM_POWERPC_BARRIER_H */
+13 −0
Original line number Diff line number Diff line
@@ -28,4 +28,17 @@
#define rmb()				mb()
#define wmb()				mb()

#define smp_store_release(p, v)			\
do {						\
	barrier();				\
	WRITE_ONCE(*p, v);			\
} while (0)

#define smp_load_acquire(p)			\
({						\
	typeof(*p) ___p1 = READ_ONCE(*p);	\
	barrier();				\
	___p1;					\
})

#endif /* __TOOLS_LIB_ASM_BARRIER_H */
+13 −0
Original line number Diff line number Diff line
@@ -40,4 +40,17 @@ do { __asm__ __volatile__("ba,pt %%xcc, 1f\n\t" \
#define rmb()	__asm__ __volatile__("":::"memory")
#define wmb()	__asm__ __volatile__("":::"memory")

#define smp_store_release(p, v)			\
do {						\
	barrier();				\
	WRITE_ONCE(*p, v);			\
} while (0)

#define smp_load_acquire(p)			\
({						\
	typeof(*p) ___p1 = READ_ONCE(*p);	\
	barrier();				\
	___p1;					\
})

#endif /* !(__TOOLS_LINUX_SPARC64_BARRIER_H) */
Loading