
Commit 7dffb720 authored by Stephen Rothwell

ppc32: use L1_CACHE_SHIFT/L1_CACHE_BYTES



instead of L1_CACHE_LINE_SIZE and LG_L1_CACHE_LINE_SIZE

Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
parent cf764855
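The two generic macros encode one quantity, with the byte count derived from the shift. A minimal C sketch of the relationship, assuming 32-byte lines for illustration (the real shift is set per-platform in the cache header):

	#define L1_CACHE_SHIFT	5			/* assumed: 32-byte lines */
	#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)	/* 1 << 5 == 32 */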
+1 −1
@@ -837,7 +837,7 @@ relocate_kernel:
copy_and_flush:
	addi	r5,r5,-4
	addi	r6,r6,-4
-4:	li	r0,L1_CACHE_LINE_SIZE/4
+4:	li	r0,L1_CACHE_BYTES/4
	mtctr	r0
3:	addi	r6,r6,4			/* copy a cache line */
	lwzx	r0,r6,r4
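copy_and_flush moves one 32-bit word per lwzx/stwx pass, so the counter above is loaded with the number of words in a cache line. A hedged C rendering of that inner loop (function and parameter names are illustrative, not kernel code):

	/* copy one cache line, a word at a time */
	static void copy_line(unsigned int *dst, const unsigned int *src)
	{
		for (unsigned int i = 0; i < L1_CACHE_BYTES / 4; i++)	/* li r0 / mtctr */
			dst[i] = src[i];				/* lwzx / stwx, bdnz */
	}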
+29 −29
@@ -496,21 +496,21 @@ _GLOBAL(flush_icache_range)
BEGIN_FTR_SECTION
	blr				/* for 601, do nothing */
END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4
	mr	r6,r3
1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	mtctr	r4
2:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	2b
	sync				/* additional sync needed on g4 */
	isync
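flush_icache_range and the dcache routines below share this prologue: mask the start down to a line boundary, round the byte count up, then shift by L1_CACHE_SHIFT to get a whole-line count. The same arithmetic as a C sketch (variable names are illustrative):

	unsigned long mask  = L1_CACHE_BYTES - 1;			/* li r5 */
	start &= ~mask;							/* andc r3,r3,r5 */
	unsigned long lines = (stop - start + mask) >> L1_CACHE_SHIFT;	/* subf, add, srwi. */
	if (!lines)
		return;							/* beqlr */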
@@ -523,16 +523,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
 * clean_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(clean_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4

1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -544,16 +544,16 @@ _GLOBAL(clean_dcache_range)
 * flush_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(flush_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4

1:	dcbf	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbst's to get to ram */
	blr
@@ -566,16 +566,16 @@ _GLOBAL(flush_dcache_range)
 * invalidate_dcache_range(unsigned long start, unsigned long stop)
 */
_GLOBAL(invalidate_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
	andc	r3,r3,r5
	subf	r4,r3,r4
	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
	beqlr
	mtctr	r4

1:	dcbi	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	sync				/* wait for dcbi's to get to ram */
	blr
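clean_dcache_range, flush_dcache_range and invalidate_dcache_range differ only in the per-line instruction: dcbst writes a dirty line back to memory, dcbf writes it back and invalidates it, dcbi invalidates it without writeback. Their shared loop shape, as a hedged C outline (cache_op is a hypothetical stand-in for whichever instruction applies):

	for (unsigned long p = start; lines--; p += L1_CACHE_BYTES)
		cache_op(p);	/* dcbst, dcbf or dcbi, then a final sync */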
@@ -596,7 +596,7 @@ _GLOBAL(flush_dcache_all)
	mtctr	r4
	lis     r5, KERNELBASE@h
1:	lwz	r3, 0(r5)		/* Load one word from every line */
-	addi	r5, r5, L1_CACHE_LINE_SIZE
+	addi	r5, r5, L1_CACHE_BYTES
	bdnz    1b
	blr
#endif /* CONFIG_NOT_COHERENT_CACHE */
@@ -614,16 +614,16 @@ BEGIN_FTR_SECTION
	blr					/* for 601, do nothing */
END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	isync
@@ -646,16 +646,16 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
	mtmsr	r0
	isync
	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
	mtctr	r4
	mr	r6,r3
0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	0b
	sync
	mtctr	r4
1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
	bdnz	1b
	sync
	mtmsr	r10				/* restore DR */
@@ -670,7 +670,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
 * void clear_pages(void *page, int order) ;
 */
_GLOBAL(clear_pages)
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
	slw	r0,r0,r4
	mtctr	r0
#ifdef CONFIG_8xx
@@ -682,7 +682,7 @@ _GLOBAL(clear_pages)
#else
1:	dcbz	0,r3
#endif
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
	bdnz	1b
	blr

@@ -708,7 +708,7 @@ _GLOBAL(copy_page)

#ifdef CONFIG_8xx
	/* don't use prefetch on 8xx */
-    	li	r0,4096/L1_CACHE_LINE_SIZE
+    	li	r0,4096/L1_CACHE_BYTES
	mtctr	r0
1:	COPY_16_BYTES
	bdnz	1b
@@ -722,13 +722,13 @@ _GLOBAL(copy_page)
	li	r11,4
	mtctr	r0
11:	dcbt	r11,r4
-	addi	r11,r11,L1_CACHE_LINE_SIZE
+	addi	r11,r11,L1_CACHE_BYTES
	bdnz	11b
#else /* MAX_COPY_PREFETCH == 1 */
	dcbt	r5,r4
-	li	r11,L1_CACHE_LINE_SIZE+4
+	li	r11,L1_CACHE_BYTES+4
#endif /* MAX_COPY_PREFETCH */
-	li	r0,4096/L1_CACHE_LINE_SIZE - MAX_COPY_PREFETCH
+	li	r0,4096/L1_CACHE_BYTES - MAX_COPY_PREFETCH
	crclr	4*cr0+eq
2:
	mtctr	r0
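The setup above keeps MAX_COPY_PREFETCH lines queued ahead of the copy, so the main loop only has to cover the rest of the page. As worked arithmetic, assuming 32-byte lines: 4096/L1_CACHE_BYTES = 128 lines per page, minus the MAX_COPY_PREFETCH lines already touched by dcbt.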
@@ -736,12 +736,12 @@ _GLOBAL(copy_page)
	dcbt	r11,r4
	dcbz	r5,r3
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
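Each COPY_16_BYTES moves 16 bytes, so the #if ladder emits L1_CACHE_BYTES/16 of them per dcbz'd line: two for 32-byte lines, four for 64-byte lines, eight for 128-byte lines.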
+12 −12
@@ -66,9 +66,9 @@
	.stabs	"copy32.S",N_SO,0,0,0f
0:

-CACHELINE_BYTES = L1_CACHE_LINE_SIZE
-LG_CACHELINE_BYTES = LG_L1_CACHE_LINE_SIZE
-CACHELINE_MASK = (L1_CACHE_LINE_SIZE-1)
+CACHELINE_BYTES = L1_CACHE_BYTES
+LG_CACHELINE_BYTES = L1_CACHE_SHIFT
+CACHELINE_MASK = (L1_CACHE_BYTES-1)

/*
 * Use dcbz on the complete cache lines in the destination
@@ -205,12 +205,12 @@ _GLOBAL(cacheable_memcpy)
	dcbz	r11,r6
#endif
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES
	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES
	COPY_16_BYTES
	COPY_16_BYTES
@@ -399,12 +399,12 @@ _GLOBAL(__copy_tofrom_user)
	.text
/* the main body of the cacheline loop */
	COPY_16_BYTES_WITHEX(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_WITHEX(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_WITHEX(2)
	COPY_16_BYTES_WITHEX(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_WITHEX(4)
	COPY_16_BYTES_WITHEX(5)
	COPY_16_BYTES_WITHEX(6)
@@ -458,12 +458,12 @@ _GLOBAL(__copy_tofrom_user)
 * 104f (if in read part) or 105f (if in write part), after updating r5
 */
	COPY_16_BYTES_EXCODE(0)
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
	COPY_16_BYTES_EXCODE(1)
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
	COPY_16_BYTES_EXCODE(2)
	COPY_16_BYTES_EXCODE(3)
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
	COPY_16_BYTES_EXCODE(4)
	COPY_16_BYTES_EXCODE(5)
	COPY_16_BYTES_EXCODE(6)
+2 −2
@@ -387,10 +387,10 @@ turn_on_mmu:
#endif /* defined(CONFIG_PM) || defined(CONFIG_CPU_FREQ) */

	.section .data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
sleep_storage:
	.long 0
-	.balign	L1_CACHE_LINE_SIZE, 0
+	.balign	L1_CACHE_BYTES, 0

#endif /* CONFIG_6xx */
	.section .text
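The paired .balign directives give sleep_storage a cache line to itself: the first starts it on a line boundary, the second zero-pads past the lone .long to the next boundary, so flushing or invalidating that line cannot disturb neighbouring data. With the 32-byte lines assumed above, that is 28 bytes of padding.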
+2 −2
@@ -290,10 +290,10 @@ _GLOBAL(__init_fpu_registers)
#define CS_SIZE		32

	.data
-	.balign	L1_CACHE_LINE_SIZE
+	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
-	.balign	L1_CACHE_LINE_SIZE,0
+	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to backup CPU 0 state. This