
Commit ae8a8b95 authored by Will Deacon, committed by Russell King

ARM: 7691/1: mm: kill unused TLB_CAN_READ_FROM_L1_CACHE and use ALT_SMP instead



Many ARMv7 cores have hardware page table walkers that can read the L1
cache. This is discoverable from the ID_MMFR3 register, although this
can be expensive to access from the low-level set_pte functions and is a
pain to cache, particularly with multi-cluster systems.
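
For illustration only (not part of this patch), a probe of that capability
might look roughly like the sketch below; it assumes the coherent walk
field of ID_MMFR3 sits in bits [23:20], as described in the ARM ARM:

	mrc	p15, 0, r0, c0, c1, 7	@ read ID_MMFR3
	ubfx	r0, r0, #20, #4		@ extract coherent walk field [23:20]
	cmp	r0, #0			@ non-zero: the table walker can
					@ read the L1 data cache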

A useful observation is that the multi-processing extensions for ARMv7
require coherent table walks, meaning that we can make use of ALT_SMP
patching in proc-v7-* to patch away the cache flush safely for these
cores.
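
As a simplified sketch (assuming CONFIG_SMP=y), an ALT_SMP(mov pc, lr) /
ALT_UP(mcr p15, 0, r0, c7, c10, 1) pair expands to roughly the following:
the SMP instruction is emitted inline, and the UP alternative is recorded
in the .alt.smp.init section so a kernel booting on a uniprocessor can
patch it over the inline copy early during boot:

9998:	mov	pc, lr			@ SMP variant, lives in .text
	.pushsection ".alt.smp.init", "a"
	.long	9998b			@ address to patch on a UP boot
	mcr	p15, 0, r0, c7, c10, 1	@ UP replacement instruction
	.popsection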

Reported-by: Albin Tonnerre <Albin.Tonnerre@arm.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will.deacon@arm.com>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
parent b0088480
arch/arm/include/asm/tlbflush.h +1 −1
@@ -169,7 +169,7 @@
 # define v6wbi_always_flags	(-1UL)
 #endif
 
-#define v7wbi_tlb_flags_smp	(TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
+#define v7wbi_tlb_flags_smp	(TLB_WB | TLB_BARRIER | \
 				 TLB_V7_UIS_FULL | TLB_V7_UIS_PAGE | \
 				 TLB_V7_UIS_ASID | TLB_V7_UIS_BP)
 #define v7wbi_tlb_flags_up	(TLB_WB | TLB_DCLEAN | TLB_BARRIER | \
arch/arm/mm/proc-v6.S +0 −2
@@ -80,12 +80,10 @@ ENTRY(cpu_v6_do_idle)
 	mov	pc, lr
 
 ENTRY(cpu_v6_dcache_clean_area)
-#ifndef TLB_CAN_READ_FROM_L1_CACHE
 1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
 	add	r0, r0, #D_CACHE_LINE_SIZE
 	subs	r1, r1, #D_CACHE_LINE_SIZE
 	bhi	1b
-#endif
 	mov	pc, lr
 
 /*
arch/arm/mm/proc-v7-2level.S +2 −1
@@ -110,7 +110,8 @@ ENTRY(cpu_v7_set_pte_ext)
  ARM(	str	r3, [r0, #2048]! )
  THUMB(	add	r0, r0, #2048 )
  THUMB(	str	r3, [r0] )
-	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
+	ALT_SMP(mov	pc,lr)
+	ALT_UP (mcr	p15, 0, r0, c7, c10, 1)		@ flush_pte
 #endif
 	mov	pc, lr
 ENDPROC(cpu_v7_set_pte_ext)
arch/arm/mm/proc-v7-3level.S +2 −1
@@ -73,7 +73,8 @@ ENTRY(cpu_v7_set_pte_ext)
 	tst	r3, #1 << (55 - 32)		@ L_PTE_DIRTY
 	orreq	r2, #L_PTE_RDONLY
 1:	strd	r2, r3, [r0]
-	mcr	p15, 0, r0, c7, c10, 1		@ flush_pte
+	ALT_SMP(mov	pc, lr)
+	ALT_UP (mcr	p15, 0, r0, c7, c10, 1)		@ flush_pte
 #endif
 	mov	pc, lr
 ENDPROC(cpu_v7_set_pte_ext)
arch/arm/mm/proc-v7.S +2 −2
@@ -75,14 +75,14 @@ ENTRY(cpu_v7_do_idle)
 ENDPROC(cpu_v7_do_idle)
 
 ENTRY(cpu_v7_dcache_clean_area)
-#ifndef TLB_CAN_READ_FROM_L1_CACHE
+	ALT_SMP(mov	pc, lr)			@ MP extensions imply L1 PTW
+	ALT_UP(W(nop))
 	dcache_line_size r2, r3
 1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
 	add	r0, r0, r2
 	subs	r1, r1, r2
 	bhi	1b
 	dsb
-#endif
 	mov	pc, lr
 ENDPROC(cpu_v7_dcache_clean_area)