Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 4ea2126b authored by Will Deacon, committed by Greg Kroah-Hartman
Browse files

FROMLIST: arm64: mm: Fix and re-enable ARM64_SW_TTBR0_PAN



With the ASID now installed in TTBR1, we can re-enable ARM64_SW_TTBR0_PAN
by ensuring that we switch to a reserved ASID of zero when disabling
user access and restore the active user ASID on the uaccess enable path.

Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Tested-by: Laura Abbott <labbott@redhat.com>
Tested-by: Shanker Donthineni <shankerd@codeaurora.org>
Signed-off-by: Will Deacon <will.deacon@arm.com>
(cherry picked from git://git.kernel.org/pub/scm/linux/kernel/git/arm64/linux.git
 commit 27a921e75711d924617269e0ba4adb8bae9fd0d1)

Change-Id: I3b06e02766753c59fac975363a2ead5c5e45b8f3
[ghackmann@google.com: adjust context, applying asm-uaccess.h changes to
 uaccess.h]
Signed-off-by: Greg Hackmann <ghackmann@google.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@google.com>
parent 6de44978
Loading
Loading
Loading
Loading
+0 −1
Original line number Diff line number Diff line
@@ -597,7 +597,6 @@ endif

config ARM64_SW_TTBR0_PAN
	bool "Emulate Privileged Access Never using TTBR0_EL1 switching"
	depends on BROKEN       # Temporary while switch_mm is reworked
	help
	  Enabling this option prevents the kernel from accessing
	  user-space memory directly by pointing TTBR0_EL1 to a reserved
+34 −12
Original line number Diff line number Diff line
@@ -136,15 +136,19 @@ static inline void __uaccess_ttbr0_disable(void)
{
	unsigned long ttbr;

	ttbr = read_sysreg(ttbr1_el1);
	/* reserved_ttbr0 placed at the end of swapper_pg_dir */
	ttbr = read_sysreg(ttbr1_el1) + SWAPPER_DIR_SIZE;
	write_sysreg(ttbr, ttbr0_el1);
	write_sysreg(ttbr + SWAPPER_DIR_SIZE, ttbr0_el1);
	isb();
	/* Set reserved ASID */
	ttbr &= ~(0xffffUL << 48);
	write_sysreg(ttbr, ttbr1_el1);
	isb();
}

static inline void __uaccess_ttbr0_enable(void)
{
	unsigned long flags;
	unsigned long flags, ttbr0, ttbr1;

	/*
	 * Disable interrupts to avoid preemption between reading the 'ttbr0'
@@ -152,7 +156,16 @@ static inline void __uaccess_ttbr0_enable(void)
	 * roll-over and an update of 'ttbr0'.
	 */
	local_irq_save(flags);
	write_sysreg(current_thread_info()->ttbr0, ttbr0_el1);
	ttbr0 = current_thread_info()->ttbr0;

	/* Restore active ASID */
	ttbr1 = read_sysreg(ttbr1_el1);
	ttbr1 |= ttbr0 & (0xffffUL << 48);
	write_sysreg(ttbr1, ttbr1_el1);
	isb();

	/* Restore user page table */
	write_sysreg(ttbr0, ttbr0_el1);
	isb();
	local_irq_restore(flags);
}
@@ -440,11 +453,20 @@ extern __must_check long strnlen_user(const char __user *str, long n);
	add	\tmp1, \tmp1, #SWAPPER_DIR_SIZE	// reserved_ttbr0 at the end of swapper_pg_dir
	msr	ttbr0_el1, \tmp1		// set reserved TTBR0_EL1
	isb
	sub	\tmp1, \tmp1, #SWAPPER_DIR_SIZE
	bic	\tmp1, \tmp1, #(0xffff << 48)
	msr	ttbr1_el1, \tmp1		// set reserved ASID
	isb
	.endm

	.macro	__uaccess_ttbr0_enable, tmp1
	.macro	__uaccess_ttbr0_enable, tmp1, tmp2
	get_thread_info \tmp1
	ldr	\tmp1, [\tmp1, #TSK_TI_TTBR0]	// load saved TTBR0_EL1
	mrs	\tmp2, ttbr1_el1
	extr    \tmp2, \tmp2, \tmp1, #48
	ror     \tmp2, \tmp2, #16
	msr	ttbr1_el1, \tmp2		// set the active ASID
	isb
	msr	ttbr0_el1, \tmp1		// set the non-PAN TTBR0_EL1
	isb
	.endm
@@ -455,18 +477,18 @@ alternative_if_not ARM64_HAS_PAN
alternative_else_nop_endif
	.endm

	.macro	uaccess_ttbr0_enable, tmp1, tmp2
	.macro	uaccess_ttbr0_enable, tmp1, tmp2, tmp3
alternative_if_not ARM64_HAS_PAN
	save_and_disable_irq \tmp2		// avoid preemption
	__uaccess_ttbr0_enable \tmp1
	restore_irq \tmp2
	save_and_disable_irq \tmp3		// avoid preemption
	__uaccess_ttbr0_enable \tmp1, \tmp2
	restore_irq \tmp3
alternative_else_nop_endif
	.endm
#else
	.macro	uaccess_ttbr0_disable, tmp1
	.endm

	.macro	uaccess_ttbr0_enable, tmp1, tmp2
	.macro	uaccess_ttbr0_enable, tmp1, tmp2, tmp3
	.endm
#endif

@@ -480,8 +502,8 @@ alternative_if ARM64_ALT_PAN_NOT_UAO
alternative_else_nop_endif
	.endm

	.macro	uaccess_enable_not_uao, tmp1, tmp2
	uaccess_ttbr0_enable \tmp1, \tmp2
	.macro	uaccess_enable_not_uao, tmp1, tmp2, tmp3
	uaccess_ttbr0_enable \tmp1, \tmp2, \tmp3
alternative_if ARM64_ALT_PAN_NOT_UAO
	SET_PSTATE_PAN(0)
alternative_else_nop_endif
+2 −2
Original line number Diff line number Diff line
@@ -129,7 +129,7 @@ alternative_if ARM64_HAS_PAN
alternative_else_nop_endif

	.if	\el != 0
	mrs	x21, ttbr0_el1
	mrs	x21, ttbr1_el1
	tst	x21, #0xffff << 48		// Check for the reserved ASID
	orr	x23, x23, #PSR_PAN_BIT		// Set the emulated PAN in the saved SPSR
	b.eq	1f				// TTBR0 access already disabled
@@ -193,7 +193,7 @@ alternative_else_nop_endif
	tbnz	x22, #22, 1f			// Skip re-enabling TTBR0 access if the PSR_PAN_BIT is set
	.endif

	__uaccess_ttbr0_enable x0
	__uaccess_ttbr0_enable x0, x1
1:
	.if	\el != 0
	and	x22, x22, #~PSR_PAN_BIT		// ARMv8.0 CPUs do not understand this bit
+1 −1
Original line number Diff line number Diff line
@@ -30,7 +30,7 @@
 * Alignment fixed up by hardware.
 */
ENTRY(__clear_user)
	uaccess_enable_not_uao x2, x3
	uaccess_enable_not_uao x2, x3, x4
	mov	x2, x1			// save the size for fixup return
	subs	x1, x1, #8
	b.mi	2f
+1 −1
Original line number Diff line number Diff line
@@ -64,7 +64,7 @@

end	.req	x5
ENTRY(__arch_copy_from_user)
	uaccess_enable_not_uao x3, x4
	uaccess_enable_not_uao x3, x4, x5
	add	end, x0, x2
#include "copy_template.S"
	uaccess_disable_not_uao x3
Loading