
Commit 1ef55be1 authored by Andy Lutomirski, committed by Thomas Gleixner

x86/asm: Get rid of __read_cr4_safe()



We use __read_cr4() vs __read_cr4_safe() inconsistently.  On
CR4-less CPUs, all CR4 bits are effectively clear, so we can make
the code simpler and more robust by making __read_cr4() always fix
up faults on 32-bit kernels.

This may fix some bugs on old 486-like CPUs, but I don't have any
easy way to test that.
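
For reference, the fixup relies on the kernel's exception-table mechanism: if the mov from %cr4 raises #UD on a CPU that lacks CR4, the fault handler looks up the faulting instruction in the exception table and resumes at the fixup label instead of oopsing. A minimal annotated sketch of the pattern (the code matches the new 32-bit native_read_cr4() in the diff below; the comments are explanatory and not part of the patch):

static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	/*
	 * The "0" (0) input constraint preloads val's register with 0.
	 * If the mov at label 1 faults, the _ASM_EXTABLE entry makes the
	 * fault handler resume at label 2 with val still 0, so a missing
	 * CR4 reads back as all bits clear.
	 */
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
	return val;
}

Callers such as cr4_init_shadow() can then use __read_cr4() unconditionally, as the tlbflush.h hunk below shows.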

Signed-off-by: Andy Lutomirski <luto@kernel.org>
Cc: Brian Gerst <brgerst@gmail.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: david@saggiorato.net
Link: http://lkml.kernel.org/r/ea647033d357d9ce2ad2bbde5a631045f5052fb6.1475178370.git.luto@kernel.org


Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
parent d7e25c66
arch/x86/include/asm/paravirt.h  +0 −4
@@ -80,10 +80,6 @@ static inline unsigned long __read_cr4(void)
 {
 	return PVOP_CALL0(unsigned long, pv_cpu_ops.read_cr4);
 }
-static inline unsigned long __read_cr4_safe(void)
-{
-	return PVOP_CALL0(unsigned long, pv_cpu_ops.read_cr4_safe);
-}
 
 static inline void __write_cr4(unsigned long x)
 {
arch/x86/include/asm/paravirt_types.h  +0 −1
@@ -108,7 +108,6 @@ struct pv_cpu_ops {
 	unsigned long (*read_cr0)(void);
 	void (*write_cr0)(unsigned long);
 
-	unsigned long (*read_cr4_safe)(void);
 	unsigned long (*read_cr4)(void);
 	void (*write_cr4)(unsigned long);
 
arch/x86/include/asm/special_insns.h  +7 −15
@@ -59,22 +59,19 @@ static inline void native_write_cr3(unsigned long val)
 static inline unsigned long native_read_cr4(void)
 {
 	unsigned long val;
-	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
-	return val;
-}
-
-static inline unsigned long native_read_cr4_safe(void)
-{
-	unsigned long val;
-	/* This could fault if %cr4 does not exist. In x86_64, a cr4 always
-	 * exists, so it will never fail. */
 #ifdef CONFIG_X86_32
+	/*
+	 * This could fault if CR4 does not exist.  Non-existent CR4
+	 * is functionally equivalent to CR4 == 0.  Keep it simple and pretend
+	 * that CR4 == 0 on CPUs that don't have CR4.
+	 */
 	asm volatile("1: mov %%cr4, %0\n"
 		     "2:\n"
 		     _ASM_EXTABLE(1b, 2b)
 		     : "=r" (val), "=m" (__force_order) : "0" (0));
 #else
-	val = native_read_cr4();
+	/* CR4 always exists on x86_64. */
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
 #endif
 	return val;
 }
@@ -182,11 +179,6 @@ static inline unsigned long __read_cr4(void)
 	return native_read_cr4();
 }
 
-static inline unsigned long __read_cr4_safe(void)
-{
-	return native_read_cr4_safe();
-}
-
 static inline void __write_cr4(unsigned long x)
 {
 	native_write_cr4(x);
arch/x86/include/asm/tlbflush.h  +1 −1
@@ -81,7 +81,7 @@ DECLARE_PER_CPU_SHARED_ALIGNED(struct tlb_state, cpu_tlbstate);
 /* Initialize cr4 shadow for this CPU. */
 static inline void cr4_init_shadow(void)
 {
-	this_cpu_write(cpu_tlbstate.cr4, __read_cr4_safe());
+	this_cpu_write(cpu_tlbstate.cr4, __read_cr4());
 }
 
 /* Set in this cpu's CR4. */
arch/x86/kernel/paravirt.c  +0 −1
@@ -332,7 +332,6 @@ __visible struct pv_cpu_ops pv_cpu_ops = {
 	.read_cr0 = native_read_cr0,
 	.write_cr0 = native_write_cr0,
 	.read_cr4 = native_read_cr4,
-	.read_cr4_safe = native_read_cr4_safe,
 	.write_cr4 = native_write_cr4,
 #ifdef CONFIG_X86_64
 	.read_cr8 = native_read_cr8,