Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 112e5847 authored by Paul Mundt
Browse files

sh: TLB protection violation exception optimizations.



This adds a bit of rework to have the TLB protection violations skip the
TLB miss fastpath and go directly into do_page_fault(), as these require
slow path handling.

Based on an earlier patch by SUGIOKA Toshinobu.

Signed-off-by: Paul Mundt <lethal@linux-sh.org>
parent e7b8b7f1
Loading
Loading
Loading
Loading
+19 −11
Original line number Diff line number Diff line
@@ -113,34 +113,33 @@ OFF_TRA = (16*4+6*4)
#if defined(CONFIG_MMU)
	.align	2
ENTRY(tlb_miss_load)
	bra	call_dpf
	bra	call_handle_tlbmiss
	 mov	#0, r5

	.align	2
ENTRY(tlb_miss_store)
	bra	call_dpf
	bra	call_handle_tlbmiss
	 mov	#1, r5

	.align	2
ENTRY(initial_page_write)
	bra	call_dpf
	bra	call_handle_tlbmiss
	 mov	#1, r5

	.align	2
ENTRY(tlb_protection_violation_load)
	bra	call_dpf
	bra	call_do_page_fault
	 mov	#0, r5

	.align	2
ENTRY(tlb_protection_violation_store)
	bra	call_dpf
	bra	call_do_page_fault
	 mov	#1, r5

call_dpf:
call_handle_tlbmiss:
	mov.l	1f, r0
	mov	r5, r8
	mov.l	@r0, r6
	mov	r6, r9
	mov.l	2f, r0
	sts	pr, r10
	jsr	@r0
@@ -151,16 +150,25 @@ call_dpf:
	 lds	r10, pr
	rts
	 nop
0:	mov.l	3f, r0
	mov	r9, r6
0:
	mov	r8, r5
	jmp	@r0
call_do_page_fault:
	mov.l	1f, r0
	mov.l	@r0, r6

	sti

	mov.l	3f, r0
	mov.l	4f, r1
	mov	r15, r4
	jmp	@r0
	 lds	r1, pr

	.align 2
1:	.long	MMU_TEA
2:	.long	__do_page_fault
2:	.long	handle_tlbmiss
3:	.long	do_page_fault
4:	.long	ret_from_exception

	.align	2
ENTRY(address_error_load)
+3 −3
Original line number Diff line number Diff line
@@ -318,8 +318,8 @@ do_sigbus:
/*
 * Called with interrupts disabled.
 */
asmlinkage int __kprobes __do_page_fault(struct pt_regs *regs,
					 unsigned long writeaccess,
asmlinkage int __kprobes
handle_tlbmiss(struct pt_regs *regs, unsigned long writeaccess,
	       unsigned long address)
{
	pgd_t *pgd;