Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 62122fd7 authored by Borislav Petkov, committed by H. Peter Anvin
Browse files

x86, cpufeature: Use new CC_HAVE_ASM_GOTO



... for checking for "asm goto" compiler support. It is more explicit
this way and we cover the cases where distros have backported that
support even to gcc versions < 4.5.

Signed-off-by: Borislav Petkov <bp@suse.de>
Link: http://lkml.kernel.org/r/1372437701-13351-1-git-send-email-bp@alien8.de


Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
parent 9f84b626
Loading
Loading
Loading
Loading
+11 −6
Original line number Diff line number Diff line
@@ -366,9 +366,10 @@ extern bool __static_cpu_has_safe(u16 bit);
 */
static __always_inline __pure bool __static_cpu_has(u16 bit)
{
#if __GNUC__ > 4 || __GNUC_MINOR__ >= 5
#ifdef CC_HAVE_ASM_GOTO

#ifdef CONFIG_X86_DEBUG_STATIC_CPU_HAS

		/*
		 * Catch too early usage of this before alternatives
		 * have run.
@@ -384,6 +385,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
			 ".previous\n"
			 /* skipping size check since replacement size = 0 */
			 : : "i" (X86_FEATURE_ALWAYS) : : t_warn);

#endif

		asm goto("1: jmp %l[t_no]\n"
@@ -406,7 +408,9 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
		warn_pre_alternatives();
		return false;
#endif
#else /* GCC_VERSION >= 40500 */

#else /* CC_HAVE_ASM_GOTO */

		u8 flag;
		/* Open-coded due to __stringify() in ALTERNATIVE() */
		asm volatile("1: movb $0,%0\n"
@@ -427,7 +431,8 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)
			     ".previous\n"
			     : "=qm" (flag) : "i" (bit));
		return flag;
#endif

#endif /* CC_HAVE_ASM_GOTO */
}

#define static_cpu_has(bit)					\
@@ -441,7 +446,7 @@ static __always_inline __pure bool __static_cpu_has(u16 bit)

static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
{
#if __GNUC__ > 4 || __GNUC_MINOR__ >= 5
#ifdef CC_HAVE_ASM_GOTO
/*
 * We need to spell the jumps to the compiler because, depending on the offset,
 * the replacement jump can be bigger than the original jump, and this we cannot
@@ -475,7 +480,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
		return false;
	t_dynamic:
		return __static_cpu_has_safe(bit);
#else /* GCC_VERSION >= 40500 */
#else
		u8 flag;
		/* Open-coded due to __stringify() in ALTERNATIVE() */
		asm volatile("1: movb $2,%0\n"
@@ -511,7 +516,7 @@ static __always_inline __pure bool _static_cpu_has_safe(u16 bit)
			     : "=qm" (flag)
			     : "i" (bit), "i" (X86_FEATURE_ALWAYS));
		return (flag == 2 ? __static_cpu_has_safe(bit) : flag);
#endif
#endif /* CC_HAVE_ASM_GOTO */
}

#define static_cpu_has_safe(bit)				\