
Commit 63bcff2a authored by H. Peter Anvin

x86, smap: Add STAC and CLAC instructions to control user space access



When Supervisor Mode Access Prevention (SMAP) is enabled, access to
userspace from the kernel is controlled by the AC flag.  To make the
performance of manipulating that flag acceptable, there are two new
instructions, STAC and CLAC, to set and clear it.

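As a minimal sketch of the resulting pattern (illustrative, not part
of this patch: user_read_u32() is a hypothetical helper, while
stac()/clac() are the <asm/smap.h> wrappers this series introduces),
every kernel touch of user memory ends up bracketed like this:

#include <linux/errno.h>
#include <linux/types.h>
#include <linux/uaccess.h>
#include <asm/smap.h>

/* Hypothetical helper, for illustration only. */
static inline int user_read_u32(u32 *dst, const u32 __user *src)
{
	unsigned long left;

	stac();		/* EFLAGS.AC = 1: user accesses permitted */
	left = __copy_from_user_inatomic(dst, src, sizeof(*dst));
	clac();		/* EFLAGS.AC = 0: user accesses fault again */

	return left ? -EFAULT : 0;
}

On hardware without SMAP, both wrappers patch down to 3-byte NOPs, so
the fast path costs nothing.
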
This patch adds those instructions, via alternative(), when the SMAP
feature is enabled.  It also adds X86_EFLAGS_AC unconditionally to the
SYSCALL entry mask; there is simply no reason to make that one
conditional.
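
The mask write itself lives outside this excerpt (in
arch/x86/kernel/cpu/common.c in the full commit); a sketch of the
resulting setup, assuming the usual 64-bit MSR_SYSCALL_MASK
programming:

/* SYSCALL clears every EFLAGS bit named in this mask on entry, so
 * including X86_EFLAGS_AC guarantees no syscall starts with
 * user-space access still enabled.
 */
wrmsrl(MSR_SYSCALL_MASK,
       X86_EFLAGS_TF | X86_EFLAGS_DF | X86_EFLAGS_IF |
       X86_EFLAGS_IOPL | X86_EFLAGS_AC);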

Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
Link: http://lkml.kernel.org/r/1348256595-29119-9-git-send-email-hpa@linux.intel.com
parent a052858f
arch/x86/ia32/ia32entry.S  +6 −0
@@ -14,6 +14,7 @@
#include <asm/segment.h>
#include <asm/irqflags.h>
#include <asm/asm.h>
+#include <asm/smap.h>
#include <linux/linkage.h>
#include <linux/err.h>

@@ -146,8 +147,10 @@ ENTRY(ia32_sysenter_target)
	SAVE_ARGS 0,1,0
 	/* no need to do an access_ok check here because rbp has been
 	   32bit zero extended */ 
+	ASM_STAC
1:	movl	(%rbp),%ebp
	_ASM_EXTABLE(1b,ia32_badarg)
+	ASM_CLAC
	orl     $TS_COMPAT,TI_status+THREAD_INFO(%rsp,RIP-ARGOFFSET)
	testl   $_TIF_WORK_SYSCALL_ENTRY,TI_flags+THREAD_INFO(%rsp,RIP-ARGOFFSET)
	CFI_REMEMBER_STATE
@@ -301,8 +304,10 @@ ENTRY(ia32_cstar_target)
	/* no need to do an access_ok check here because r8 has been
	   32bit zero extended */ 
	/* hardware stack frame is complete now */	
+	ASM_STAC
1:	movl	(%r8),%r9d
	_ASM_EXTABLE(1b,ia32_badarg)
+	ASM_CLAC
	orl     $TS_COMPAT,TI_status+THREAD_INFO(%rsp,RIP-ARGOFFSET)
	testl   $_TIF_WORK_SYSCALL_ENTRY,TI_flags+THREAD_INFO(%rsp,RIP-ARGOFFSET)
	CFI_REMEMBER_STATE
@@ -365,6 +370,7 @@ cstar_tracesys:
END(ia32_cstar_target)
				
ia32_badarg:
+	ASM_CLAC
	movq $-EFAULT,%rax
	jmp ia32_sysret
	CFI_ENDPROC
arch/x86/include/asm/fpu-internal.h  +6 −4
@@ -126,8 +126,9 @@ static inline int fxsave_user(struct i387_fxsave_struct __user *fx)

	/* See comment in fxsave() below. */
#ifdef CONFIG_AS_FXSAVEQ
	asm volatile("1:  fxsaveq %[fx]\n\t"
		     "2:\n"
	asm volatile(ASM_STAC "\n"
		     "1:  fxsaveq %[fx]\n\t"
		     "2: " ASM_CLAC "\n"
		     ".section .fixup,\"ax\"\n"
		     "3:  movl $-1,%[err]\n"
		     "    jmp  2b\n"
@@ -136,8 +137,9 @@ static inline int fxsave_user(struct i387_fxsave_struct __user *fx)
		     : [err] "=r" (err), [fx] "=m" (*fx)
		     : "0" (0));
#else
	asm volatile("1:  rex64/fxsave (%[fx])\n\t"
		     "2:\n"
	asm volatile(ASM_STAC "\n"
		     "1:  rex64/fxsave (%[fx])\n\t"
		     "2: " ASM_CLAC "\n"
		     ".section .fixup,\"ax\"\n"
		     "3:  movl $-1,%[err]\n"
		     "    jmp  2b\n"
arch/x86/include/asm/futex.h  +13 −6
@@ -9,10 +9,13 @@
#include <asm/asm.h>
#include <asm/errno.h>
#include <asm/processor.h>
+#include <asm/smap.h>

#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("1:\t" insn "\n"				\
		     "2:\t.section .fixup,\"ax\"\n"		\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\t" insn "\n"				\
		     "2:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
@@ -21,12 +24,14 @@
		     : "i" (-EFAULT), "0" (oparg), "1" (0))

#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)	\
	asm volatile("1:\tmovl	%2, %0\n"			\
	asm volatile("\t" ASM_STAC "\n"				\
		     "1:\tmovl	%2, %0\n"			\
		     "\tmovl\t%0, %3\n"				\
		     "\t" insn "\n"				\
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t1b\n"				\
		     "3:\t.section .fixup,\"ax\"\n"		\
		     "3:\t" ASM_CLAC "\n"			\
		     "\t.section .fixup,\"ax\"\n"		\
		     "4:\tmov\t%5, %1\n"			\
		     "\tjmp\t3b\n"				\
		     "\t.previous\n"				\
@@ -122,8 +127,10 @@ static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	asm volatile("1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		     "2:\t.section .fixup, \"ax\"\n"
	asm volatile("\t" ASM_STAC "\n"
		     "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		     "2:\t" ASM_CLAC "\n"
		     "\t.section .fixup, \"ax\"\n"
		     "3:\tmov     %3, %0\n"
		     "\tjmp     2b\n"
		     "\t.previous\n"
arch/x86/include/asm/smap.h  +2 −2
@@ -58,13 +58,13 @@

#ifdef CONFIG_X86_SMAP

-static inline void clac(void)
+static __always_inline void clac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative(ASM_NOP3, __stringify(__ASM_CLAC), X86_FEATURE_SMAP);
}

-static inline void stac(void)
+static __always_inline void stac(void)
{
	/* Note: a barrier is implicit in alternative() */
	alternative(ASM_NOP3, __stringify(__ASM_STAC), X86_FEATURE_SMAP);
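
For context, not part of the patch text: each stac()/clac() call site
assembles to a 3-byte NOP, and apply_alternatives() rewrites it in
place at boot when the CPU advertises SMAP. A sketch of the resulting
bytes (the exact NOP encoding depends on the ASM_NOP3 choice for the
running CPU):

/*
 *   as compiled:  0f 1f 00    nopl (%rax)    ASM_NOP3 placeholder
 *   as patched:   0f 01 cb    stac           X86_FEATURE_SMAP set
 *
 * clac() is the same dance with 0f 01 ca; both instructions are
 * exactly three bytes, so they overwrite the NOP in place.
 */
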
arch/x86/include/asm/uaccess.h  +19 −12
@@ -9,6 +9,7 @@
#include <linux/string.h>
#include <asm/asm.h>
#include <asm/page.h>
+#include <asm/smap.h>

#define VERIFY_READ 0
#define VERIFY_WRITE 1
@@ -192,9 +193,10 @@ extern int __get_user_bad(void);

#ifdef CONFIG_X86_32
#define __put_user_asm_u64(x, addr, err, errret)			\
	asm volatile("1:	movl %%eax,0(%2)\n"			\
	asm volatile(ASM_STAC "\n"					\
		     "1:	movl %%eax,0(%2)\n"			\
		     "2:	movl %%edx,4(%2)\n"			\
		     "3:\n"						\
		     "3: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "4:	movl %3,%0\n"				\
		     "	jmp 3b\n"					\
@@ -205,9 +207,10 @@ extern int __get_user_bad(void);
		     : "A" (x), "r" (addr), "i" (errret), "0" (err))

#define __put_user_asm_ex_u64(x, addr)					\
	asm volatile("1:	movl %%eax,0(%1)\n"			\
	asm volatile(ASM_STAC "\n"					\
		     "1:	movl %%eax,0(%1)\n"			\
		     "2:	movl %%edx,4(%1)\n"			\
		     "3:\n"						\
		     "3: " ASM_CLAC "\n"				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     _ASM_EXTABLE_EX(2b, 3b)				\
		     : : "A" (x), "r" (addr))
@@ -379,8 +382,9 @@ do { \
} while (0)

#define __get_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile("1:	mov"itype" %2,%"rtype"1\n"		\
		     "2:\n"						\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %2,%"rtype"1\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	xor"itype" %"rtype"1,%"rtype"1\n"		\
@@ -412,8 +416,9 @@ do { \
} while (0)

#define __get_user_asm_ex(x, addr, itype, rtype, ltype)			\
	asm volatile("1:	mov"itype" %1,%"rtype"0\n"		\
		     "2:\n"						\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %1,%"rtype"0\n"		\
		     "2: " ASM_CLAC "\n"				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : ltype(x) : "m" (__m(addr)))

@@ -443,8 +448,9 @@ struct __large_struct { unsigned long buf[100]; };
 * aliasing issues.
 */
#define __put_user_asm(x, addr, err, itype, rtype, ltype, errret)	\
	asm volatile("1:	mov"itype" %"rtype"1,%2\n"		\
		     "2:\n"						\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %"rtype"1,%2\n"		\
		     "2: " ASM_CLAC "\n"				\
		     ".section .fixup,\"ax\"\n"				\
		     "3:	mov %3,%0\n"				\
		     "	jmp 2b\n"					\
@@ -454,8 +460,9 @@ struct __large_struct { unsigned long buf[100]; };
		     : ltype(x), "m" (__m(addr)), "i" (errret), "0" (err))

#define __put_user_asm_ex(x, addr, itype, rtype, ltype)			\
	asm volatile("1:	mov"itype" %"rtype"0,%1\n"		\
		     "2:\n"						\
	asm volatile(ASM_STAC "\n"					\
		     "1:	mov"itype" %"rtype"0,%1\n"		\
		     "2: " ASM_CLAC "\n"				\
		     _ASM_EXTABLE_EX(1b, 2b)				\
		     : : ltype(x), "m" (__m(addr)))
