
Commit 3448890c authored by Al Viro

powerpc: get rid of zeroing, switch to RAW_COPY_USER



Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
parent f2ed8beb
arch/powerpc/Kconfig +1 −0
@@ -87,6 +87,7 @@ config PPC
 	select ARCH_HAS_DMA_SET_COHERENT_MASK
 	select ARCH_HAS_ELF_RANDOMIZE
 	select ARCH_HAS_GCOV_PROFILE_ALL
+	select ARCH_HAS_RAW_COPY_USER
 	select ARCH_HAS_SCALED_CPUTIME		if VIRT_CPU_ACCOUNTING_NATIVE
 	select ARCH_HAS_SG_CHAIN
 	select ARCH_HAS_TICK_BROADCAST		if GENERIC_CLOCKEVENTS_BROADCAST
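
Note: selecting ARCH_HAS_RAW_COPY_USER switches powerpc over to the generic copy_from_user()/copy_to_user() wrappers in include/linux/uaccess.h, which do the access_ok() check, the hardened-usercopy check, and the zeroing of any uncopied tail in one place, delegating only the actual copy to the architecture. That is what lets every later hunk in this commit drop its private zeroing code. A condensed sketch of the generic read-side wrapper of that era (paraphrased, not the verbatim kernel source):

```c
static inline unsigned long
copy_from_user(void *to, const void __user *from, unsigned long n)
{
	unsigned long res = n;	/* assume nothing gets copied */

	might_fault();
	if (likely(access_ok(VERIFY_READ, from, n))) {
		check_object_size(to, n, false);
		/* arch hook: copies, returns the number of bytes left */
		res = raw_copy_from_user(to, from, n);
	}
	if (unlikely(res))
		memset(to + (n - res), 0, res);	/* zero the tail once, here */
	return res;
}
```

The INLINE_COPY_FROM_USER/INLINE_COPY_TO_USER defines added in the next file select the inline flavour of these generic wrappers rather than the out-of-line one.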
arch/powerpc/include/asm/uaccess.h +9 −50
@@ -269,42 +269,19 @@ extern unsigned long __copy_tofrom_user(void __user *to,

 #ifndef __powerpc64__
 
-static inline unsigned long copy_from_user(void *to,
-		const void __user *from, unsigned long n)
-{
-	if (likely(access_ok(VERIFY_READ, from, n))) {
-		check_object_size(to, n, false);
-		return __copy_tofrom_user((__force void __user *)to, from, n);
-	}
-	memset(to, 0, n);
-	return n;
-}
-
-static inline unsigned long copy_to_user(void __user *to,
-		const void *from, unsigned long n)
-{
-	if (access_ok(VERIFY_WRITE, to, n)) {
-		check_object_size(from, n, true);
-		return __copy_tofrom_user(to, (__force void __user *)from, n);
-	}
-	return n;
-}
+#define INLINE_COPY_FROM_USER
+#define INLINE_COPY_TO_USER
 
 #else /* __powerpc64__ */
 
-#define __copy_in_user(to, from, size) \
-	__copy_tofrom_user((to), (from), (size))
-
-extern unsigned long copy_from_user(void *to, const void __user *from,
-				    unsigned long n);
-extern unsigned long copy_to_user(void __user *to, const void *from,
-				  unsigned long n);
-extern unsigned long copy_in_user(void __user *to, const void __user *from,
-				  unsigned long n);
-
+static inline unsigned long
+raw_copy_in_user(void __user *to, const void __user *from, unsigned long n)
+{
+	return __copy_tofrom_user(to, from, n);
+}
 #endif /* __powerpc64__ */
 
-static inline unsigned long __copy_from_user_inatomic(void *to,
+static inline unsigned long raw_copy_from_user(void *to,
 		const void __user *from, unsigned long n)
 {
 	if (__builtin_constant_p(n) && (n <= 8)) {
@@ -328,12 +305,10 @@ static inline unsigned long __copy_from_user_inatomic(void *to,
 			return 0;
 	}
 
-	check_object_size(to, n, false);
-
 	return __copy_tofrom_user((__force void __user *)to, from, n);
 }
 
-static inline unsigned long __copy_to_user_inatomic(void __user *to,
+static inline unsigned long raw_copy_to_user(void __user *to,
 		const void *from, unsigned long n)
 {
 	if (__builtin_constant_p(n) && (n <= 8)) {
@@ -357,25 +332,9 @@ static inline unsigned long __copy_to_user_inatomic(void __user *to,
 			return 0;
 	}
 
-	check_object_size(from, n, true);
-
 	return __copy_tofrom_user(to, (__force const void __user *)from, n);
 }
 
-static inline unsigned long __copy_from_user(void *to,
-		const void __user *from, unsigned long size)
-{
-	might_fault();
-	return __copy_from_user_inatomic(to, from, size);
-}
-
-static inline unsigned long __copy_to_user(void __user *to,
-		const void *from, unsigned long size)
-{
-	might_fault();
-	return __copy_to_user_inatomic(to, from, size);
-}
-
 extern unsigned long __clear_user(void __user *addr, unsigned long size);
 
 static inline unsigned long clear_user(void __user *addr, unsigned long size)
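
Note: the renames from __copy_{from,to}_user_inatomic() to raw_copy_{from,to}_user() keep powerpc's return convention, which is also what the generic layer expects: the result is the number of bytes not copied, 0 meaning complete success. check_object_size() and the might_fault() wrappers disappear here because the generic copy_from_user()/copy_to_user() now perform those checks before calling the raw helpers. A hypothetical caller (struct foo and read_arg_from_user() are invented for illustration) sees the same contract as before the switch:

```c
/* Invented example; not part of this commit. */
struct foo {
	unsigned long a;
	unsigned long b;
};

static int read_arg_from_user(struct foo *dst, const void __user *uptr)
{
	/*
	 * copy_from_user() returns the number of uncopied bytes; on a
	 * nonzero result the destination tail has already been zeroed
	 * by the generic wrapper, so failing with -EFAULT is safe.
	 */
	if (copy_from_user(dst, uptr, sizeof(*dst)))
		return -EFAULT;
	return 0;
}
```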
arch/powerpc/lib/Makefile +1 −1
@@ -14,7 +14,7 @@ obj-y += string.o alloc.o crtsavres.o code-patching.o \

 obj-$(CONFIG_PPC32)	+= div64.o copy_32.o
 
-obj64-y	+= copypage_64.o copyuser_64.o usercopy_64.o mem_64.o hweight_64.o \
+obj64-y	+= copypage_64.o copyuser_64.o mem_64.o hweight_64.o \
 	   copyuser_power7.o string_64.o copypage_power7.o memcpy_power7.o \
 	   memcpy_64.o memcmp_64.o

arch/powerpc/lib/copy_32.S +0 −14
@@ -477,18 +477,6 @@ _GLOBAL(__copy_tofrom_user)
 	bdnz	130b
 /* then clear out the destination: r3 bytes starting at 4(r6) */
 132:	mfctr	r3
-	srwi.	r0,r3,2
-	li	r9,0
-	mtctr	r0
-	beq	113f
-112:	stwu	r9,4(r6)
-	bdnz	112b
-113:	andi.	r0,r3,3
-	mtctr	r0
-	beq	120f
-114:	stb	r9,4(r6)
-	addi	r6,r6,1
-	bdnz	114b
 120:	blr
 
 	EX_TABLE(30b,108b)
@@ -497,7 +485,5 @@ _GLOBAL(__copy_tofrom_user)
 	EX_TABLE(41b,111b)
 	EX_TABLE(130b,132b)
 	EX_TABLE(131b,120b)
-	EX_TABLE(112b,120b)
-	EX_TABLE(114b,120b)
 
 EXPORT_SYMBOL(__copy_tofrom_user)
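
Note: the stores at labels 112 and 114 were the fault-fixup path that zeroed whatever __copy_tofrom_user could not read, so their two exception-table entries go with them; the label-132 fixup now only returns the residual byte count recovered from CTR. A rough C model of the semantic change (illustrative only, the real logic is the assembly above; OLD_BEHAVIOUR marks what the code did before this commit):

```c
#include <string.h>

/* 'copied' bytes made it across before the fault; 'total' were asked for. */
static unsigned long fixup_after_fault(char *dst, unsigned long copied,
				       unsigned long total)
{
	unsigned long left = total - copied;

#ifdef OLD_BEHAVIOUR
	/* pre-commit: the assembly zeroed the destination tail itself */
	memset(dst + copied, 0, left);
#endif
	/* post-commit: just report the residue; the generic
	 * copy_from_user() wrapper does any zeroing that is needed */
	return left;
}
```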
arch/powerpc/lib/copyuser_64.S +3 −32
@@ -319,32 +319,9 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	blr
 
 /*
- * here we have trapped again, need to clear ctr bytes starting at r3
+ * here we have trapped again, amount remaining is in ctr.
  */
-143:	mfctr	r5
-	li	r0,0
-	mr	r4,r3
-	mr	r3,r5		/* return the number of bytes not copied */
-1:	andi.	r9,r4,7
-	beq	3f
-90:	stb	r0,0(r4)
-	addic.	r5,r5,-1
-	addi	r4,r4,1
-	bne	1b
-	blr
-3:	cmpldi	cr1,r5,8
-	srdi	r9,r5,3
-	andi.	r5,r5,7
-	blt	cr1,93f
-	mtctr	r9
-91:	std	r0,0(r4)
-	addi	r4,r4,8
-	bdnz	91b
-93:	beqlr
-	mtctr	r5
-92:	stb	r0,0(r4)
-	addi	r4,r4,1
-	bdnz	92b
+143:	mfctr	r3
+	blr
 
 /*
@@ -389,10 +366,7 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	ld	r5,-8(r1)
 	add	r6,r6,r5
 	subf	r3,r3,r6	/* #bytes not copied */
-190:
-191:
-192:
-	blr			/* #bytes not copied in r3 */
+	blr
 
 	EX_TABLE(20b,120b)
 	EX_TABLE(220b,320b)
@@ -451,9 +425,6 @@ END_FTR_SECTION_IFCLR(CPU_FTR_UNALIGNED_LD_STD)
 	EX_TABLE(88b,188b)
 	EX_TABLE(43b,143b)
 	EX_TABLE(89b,189b)
-	EX_TABLE(90b,190b)
-	EX_TABLE(91b,191b)
-	EX_TABLE(92b,192b)
 
 /*
  * Routine to copy a whole page of data, optimized for POWER4.
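
Note: the 64-bit routine gets the same treatment as copy_32.S: the trap handler at label 143 used to clear the remaining bytes through the store loops at labels 90/91/92, and now simply returns the residue from CTR, so the fixup-only labels 190/191/192 and the three EX_TABLE entries pointing at the deleted stores vanish. For reference, EX_TABLE(insn, fixup) emits a pair that the page-fault handler looks up so it can resume at the fixup instead of oopsing; conceptually (the real powerpc entries in arch/powerpc/include/asm/extable.h store both fields as relative offsets, so the layout below is only a sketch):

```c
/* Conceptual shape of one __ex_table entry. */
struct exception_table_entry {
	int insn;	/* instruction that is allowed to fault */
	int fixup;	/* where execution resumes after the fault */
};

/*
 * On a kernel-mode fault the handler does, roughly:
 *
 *	entry = search_exception_tables(regs->nip);
 *	if (entry)
 *		regs->nip = extable_fixup(entry);
 *
 * Fewer fixup labels therefore also means fewer table entries for
 * the fault handler to search.
 */
```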