Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit ed79e946 authored by Heiko Carstens, committed by Ingo Molnar
Browse files

s390/uaccess, locking/static_keys: employ static_branch_likely()



Use the new static_branch_likely() primitive to make sure that the
most likely case is executed without taking an unconditional branch.
This wasn't possible with the old jump label primitives.

Signed-off-by: Heiko Carstens <heiko.carstens@de.ibm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: linux-kernel@vger.kernel.org
Link: http://lkml.kernel.org/r/20150729064600.GB3953@osiris


Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 3bbfafb7
Loading
Loading
Loading
Loading
+6 −6
Original line number Diff line number Diff line
@@ -15,7 +15,7 @@
#include <asm/mmu_context.h>
#include <asm/facility.h>

/*
 * Patched-at-boot key: true when the MVCOS facility is available
 * (enabled in uaccess_init() when test_facility(27) succeeds).
 * The old "struct static_key ... STATIC_KEY_INIT_FALSE" declaration
 * was pre-patch diff residue and would be a redefinition; only the
 * new DEFINE_STATIC_KEY_FALSE form is kept.
 */
static DEFINE_STATIC_KEY_FALSE(have_mvcos);

static inline unsigned long copy_from_user_mvcos(void *x, const void __user *ptr,
						 unsigned long size)
@@ -104,7 +104,7 @@ static inline unsigned long copy_from_user_mvcp(void *x, const void __user *ptr,

/*
 * Copy n bytes from user space to kernel space.
 * Fast path: the MVCOS-based variant, taken without a branch when the
 * have_mvcos static key was enabled at boot; otherwise fall back to the
 * MVCP-based variant.  Returns the number of bytes NOT copied
 * (0 on full success — standard uaccess contract; confirm against the
 * mvcos/mvcp helpers, whose bodies are outside this view).
 */
unsigned long __copy_from_user(void *to, const void __user *from, unsigned long n)
{
	if (static_branch_likely(&have_mvcos))
		return copy_from_user_mvcos(to, from, n);
	return copy_from_user_mvcp(to, from, n);
}
@@ -177,7 +177,7 @@ static inline unsigned long copy_to_user_mvcs(void __user *ptr, const void *x,

/*
 * Copy n bytes from kernel space to user space.
 * Uses the MVCOS-based variant when the have_mvcos static key is
 * enabled (likely path, no taken branch), else the MVCS-based variant.
 * Returns the number of bytes not copied (presumably; helper bodies
 * are outside this view).
 */
unsigned long __copy_to_user(void __user *to, const void *from, unsigned long n)
{
	if (static_branch_likely(&have_mvcos))
		return copy_to_user_mvcos(to, from, n);
	return copy_to_user_mvcs(to, from, n);
}
@@ -240,7 +240,7 @@ static inline unsigned long copy_in_user_mvc(void __user *to, const void __user

/*
 * Copy n bytes from one user-space location to another.
 * MVCOS-based fast path when the have_mvcos static key is enabled,
 * MVC-based fallback otherwise.  Returns the number of bytes not
 * copied (presumably; helper bodies are outside this view).
 */
unsigned long __copy_in_user(void __user *to, const void __user *from, unsigned long n)
{
	if (static_branch_likely(&have_mvcos))
		return copy_in_user_mvcos(to, from, n);
	return copy_in_user_mvc(to, from, n);
}
@@ -312,7 +312,7 @@ static inline unsigned long clear_user_xc(void __user *to, unsigned long size)

/*
 * Zero size bytes of user space starting at to.
 * MVCOS-based fast path when the have_mvcos static key is enabled,
 * XC-based fallback otherwise.  Returns the number of bytes not
 * cleared (presumably; helper bodies are outside this view).
 * The over-indented return line from the diff rendering is normalized
 * to a single tab.
 */
unsigned long __clear_user(void __user *to, unsigned long size)
{
	if (static_branch_likely(&have_mvcos))
		return clear_user_mvcos(to, size);
	return clear_user_xc(to, size);
}
@@ -386,7 +386,7 @@ early_param("uaccess_primary", parse_uaccess_pt);
/*
 * Boot-time setup: enable the MVCOS fast path when facility 27 is
 * present and no alternative primary uaccess method was forced via
 * the "uaccess_primary" early parameter (parsed elsewhere in this
 * file).  static_branch_enable() patches the have_mvcos key so the
 * static_branch_likely() sites above take the fast path branchlessly.
 * The stale static_key_slow_inc() call was pre-patch diff residue.
 */
static int __init uaccess_init(void)
{
	if (!uaccess_primary && test_facility(27))
		static_branch_enable(&have_mvcos);
	return 0;
}
early_initcall(uaccess_init);