Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 71966f3a authored by Ingo Molnar
Browse files

Merge branch 'locking/core' into x86/core, to prepare for dependent patch



Signed-off-by: Ingo Molnar <mingo@kernel.org>
parents 34e7724c 92ae1837
Loading
Loading
Loading
Loading
+3 −3
Original line number Diff line number Diff line
@@ -1662,7 +1662,7 @@ CPU from reordering them.

There are some more advanced barrier functions:

 (*) set_mb(var, value)
 (*) smp_store_mb(var, value)

     This assigns the value to the variable and then inserts a full memory
     barrier after it, depending on the function.  It isn't guaranteed to
@@ -1975,7 +1975,7 @@ after it has altered the task state:
	CPU 1
	===============================
	set_current_state();
	  set_mb();
	  smp_store_mb();
	    STORE current->state
	    <general barrier>
	LOAD event_indicated
@@ -2016,7 +2016,7 @@ between the STORE to indicate the event and the STORE to set TASK_RUNNING:
	CPU 1				CPU 2
	===============================	===============================
	set_current_state();		STORE event_indicated
	  set_mb();			wake_up();
	  smp_store_mb();		wake_up();
	    STORE current->state	  <write barrier>
	    <general barrier>		  STORE current->state
	LOAD event_indicated
+0 −2
Original line number Diff line number Diff line
@@ -66,6 +66,4 @@
#undef __ASM__MB
#undef ____cmpxchg

#define __HAVE_ARCH_CMPXCHG 1

#endif /* _ALPHA_CMPXCHG_H */
+1 −1
Original line number Diff line number Diff line
@@ -81,7 +81,7 @@ do { \
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)

#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()
+1 −1
Original line number Diff line number Diff line
@@ -114,7 +114,7 @@ do { \
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

#define set_mb(var, value)	do { var = value; smp_mb(); } while (0)
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
#define nop()		asm volatile("nop");

#define smp_mb__before_atomic()	smp_mb()
+0 −2
Original line number Diff line number Diff line
@@ -70,8 +70,6 @@ extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
   if something tries to do an invalid cmpxchg().  */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
Loading