Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 2190fed6 authored by Russell King
Browse files

ARM: entry: provide uaccess assembly macro hooks



Provide hooks into the kernel entry and exit paths to permit control
of userspace visibility to the kernel.  The intended use is:

- on entry to kernel from user, uaccess_disable will be called to
  disable userspace visibility
- on exit from kernel to user, uaccess_enable will be called to
  enable userspace visibility
- on entry from a kernel exception, uaccess_save_and_disable will be
  called to save the current userspace visibility setting, and disable
  access
- on exit from a kernel exception, uaccess_restore will be called to
  restore the userspace visibility as it was before the exception
  occurred.

These hooks allow us to keep userspace visibility disabled for the
vast majority of the kernel, except for localised regions where we
want to explicitly access userspace.

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
parent aa06e5c1
Loading
Loading
Loading
Loading
+17 −0
Original line number Diff line number Diff line
@@ -445,6 +445,23 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
#endif
	.endm

	@ Hook: disable userspace visibility on entry to the kernel.
	@ Default implementation is a no-op; \tmp is a scratch register for
	@ overriding implementations. isb=1 presumably requests a barrier
	@ after the state change — unused here; confirm against overrides.
	.macro	uaccess_disable, tmp, isb=1
	.endm

	@ Hook: re-enable userspace visibility on exit to userspace.
	@ No-op by default; \tmp and isb as for uaccess_disable above.
	.macro	uaccess_enable, tmp, isb=1
	.endm

	@ Hook: save the current userspace-visibility state into \tmp
	@ on entry from a kernel exception. No-op by default.
	.macro	uaccess_save, tmp
	.endm

	@ Hook: restore the userspace-visibility state saved by
	@ uaccess_save, on exit from a kernel exception. No-op by default.
	.macro	uaccess_restore
	.endm

	@ Convenience: save the current state, then disable userspace
	@ access — the kernel-exception entry sequence described in the
	@ commit message.
	.macro	uaccess_save_and_disable, tmp
	uaccess_save \tmp
	uaccess_disable \tmp
	.endm

	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
+22 −8
Original line number Diff line number Diff line
@@ -149,10 +149,10 @@ ENDPROC(__und_invalid)
#define SPFIX(code...)
#endif

	.macro	svc_entry, stack_hole=0, trace=1
	.macro	svc_entry, stack_hole=0, trace=1, uaccess=1
 UNWIND(.fnstart		)
 UNWIND(.save {r0 - pc}		)
	sub	sp, sp, #(S_FRAME_SIZE + \stack_hole - 4)
	sub	sp, sp, #(S_FRAME_SIZE + 8 + \stack_hole - 4)
#ifdef CONFIG_THUMB2_KERNEL
 SPFIX(	str	r0, [sp]	)	@ temporarily saved
 SPFIX(	mov	r0, sp		)
@@ -167,7 +167,7 @@ ENDPROC(__und_invalid)
	ldmia	r0, {r3 - r5}
	add	r7, sp, #S_SP - 4	@ here for interlock avoidance
	mov	r6, #-1			@  ""  ""      ""       ""
	add	r2, sp, #(S_FRAME_SIZE + \stack_hole - 4)
	add	r2, sp, #(S_FRAME_SIZE + 8 + \stack_hole - 4)
 SPFIX(	addeq	r2, r2, #4	)
	str	r3, [sp, #-4]!		@ save the "real" r0 copied
					@ from the exception stack
@@ -185,6 +185,11 @@ ENDPROC(__und_invalid)
	@
	stmia	r7, {r2 - r6}

	uaccess_save r0
	.if \uaccess
	uaccess_disable r0
	.endif

	.if \trace
#ifdef CONFIG_TRACE_IRQFLAGS
	bl	trace_hardirqs_off
@@ -194,7 +199,7 @@ ENDPROC(__und_invalid)

	.align	5
__dabt_svc:
	svc_entry
	svc_entry uaccess=0
	mov	r2, sp
	dabt_helper
 THUMB(	ldr	r5, [sp, #S_PSR]	)	@ potentially updated CPSR
@@ -368,7 +373,7 @@ ENDPROC(__fiq_abt)
#error "sizeof(struct pt_regs) must be a multiple of 8"
#endif

	.macro	usr_entry, trace=1
	.macro	usr_entry, trace=1, uaccess=1
 UNWIND(.fnstart	)
 UNWIND(.cantunwind	)	@ don't unwind the user space
	sub	sp, sp, #S_FRAME_SIZE
@@ -400,6 +405,10 @@ ENDPROC(__fiq_abt)
 ARM(	stmdb	r0, {sp, lr}^			)
 THUMB(	store_user_sp_lr r0, r1, S_SP - S_PC	)

	.if \uaccess
	uaccess_disable ip
	.endif

	@ Enable the alignment trap while in kernel mode
 ATRAP(	teq	r8, r7)
 ATRAP( mcrne	p15, 0, r8, c1, c0, 0)
@@ -435,7 +444,7 @@ ENDPROC(__fiq_abt)

	.align	5
__dabt_usr:
	usr_entry
	usr_entry uaccess=0
	kuser_cmpxchg_check
	mov	r2, sp
	dabt_helper
@@ -458,7 +467,7 @@ ENDPROC(__irq_usr)

	.align	5
__und_usr:
	usr_entry
	usr_entry uaccess=0

	mov	r2, r4
	mov	r3, r5
@@ -484,6 +493,8 @@ __und_usr:
1:	ldrt	r0, [r4]
 ARM_BE8(rev	r0, r0)				@ little endian instruction

	uaccess_disable ip

	@ r0 = 32-bit ARM instruction which caused the exception
	@ r2 = PC value for the following instruction (:= regs->ARM_pc)
	@ r4 = PC value for the faulting instruction
@@ -518,9 +529,10 @@ __und_usr_thumb:
2:	ldrht	r5, [r4]
ARM_BE8(rev16	r5, r5)				@ little endian instruction
	cmp	r5, #0xe800			@ 32bit instruction if xx != 0
	blo	__und_usr_fault_16		@ 16bit undefined instruction
	blo	__und_usr_fault_16_pan		@ 16bit undefined instruction
3:	ldrht	r0, [r2]
ARM_BE8(rev16	r0, r0)				@ little endian instruction
	uaccess_disable ip
	add	r2, r2, #2			@ r2 is PC + 2, make it PC + 4
	str	r2, [sp, #S_PC]			@ it's a 2x16bit instr, update
	orr	r0, r0, r5, lsl #16
@@ -715,6 +727,8 @@ ENDPROC(no_fp)
__und_usr_fault_32:
	mov	r1, #4
	b	1f
__und_usr_fault_16_pan:
	uaccess_disable ip
__und_usr_fault_16:
	mov	r1, #2
1:	mov	r0, sp
+2 −0
Original line number Diff line number Diff line
@@ -173,6 +173,8 @@ ENTRY(vector_swi)
 USER(	ldr	scno, [lr, #-4]		)	@ get SWI instruction
#endif

	uaccess_disable tbl

	adr	tbl, sys_call_table		@ load syscall table pointer

#if defined(CONFIG_OABI_COMPAT)
+3 −0
Original line number Diff line number Diff line
@@ -215,6 +215,7 @@
	blne	trace_hardirqs_off
#endif
	.endif
	uaccess_restore

#ifndef CONFIG_THUMB2_KERNEL
	@ ARM mode SVC restore
@@ -258,6 +259,7 @@
	@ on the stack remains correct).
	@
	.macro  svc_exit_via_fiq
	uaccess_restore
#ifndef CONFIG_THUMB2_KERNEL
	@ ARM mode restore
	mov	r0, sp
@@ -287,6 +289,7 @@


	.macro	restore_user_regs, fast = 0, offset = 0
	uaccess_enable r1, isb=0
#ifndef CONFIG_THUMB2_KERNEL
	@ ARM mode restore
	mov	r2, sp
+1 −0
Original line number Diff line number Diff line
@@ -19,6 +19,7 @@ ENTRY(v4_early_abort)
	mrc	p15, 0, r1, c5, c0, 0		@ get FSR
	mrc	p15, 0, r0, c6, c0, 0		@ get FAR
	ldr	r3, [r4]			@ read aborted ARM instruction
	uaccess_disable ip			@ disable userspace access
	bic	r1, r1, #1 << 11 | 1 << 10	@ clear bits 11 and 10 of FSR
	tst	r3, #1 << 20			@ L = 1 -> write?
	orreq	r1, r1, #1 << 11		@ yes.
Loading