
Commit f4cb1cc1 authored by Fenghua Yu, committed by H. Peter Anvin

x86-64, copy_user: Remove zero byte check before copy user buffer.



The rep movsb instruction handles a zero-byte copy by itself. As pointed out by
Linus, there is no need to check for a zero size in the kernel. Removing this
redundant check saves a few cycles in the copy user functions.

Reported-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: Fenghua Yu <fenghua.yu@intel.com>
Link: http://lkml.kernel.org/r/1384634221-6006-1-git-send-email-fenghua.yu@intel.com


Signed-off-by: H. Peter Anvin <hpa@linux.intel.com>
parent 1213959d
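
For context (not part of the commit): REP MOVSB with a count of zero in %rcx copies nothing and simply falls through, so an explicit zero-size test in front of it is pure overhead. Below is a minimal user-space C sketch, assuming GCC/Clang extended inline asm on x86-64; the rep_movsb() helper is hypothetical and only illustrates the instruction's behaviour, it is not kernel code.

/*
 * Hypothetical user-space sketch: shows that "rep movsb" with a zero
 * count copies nothing, which is why the "andl %edx,%edx; jz ..."
 * check removed by this patch is redundant.
 */
#include <assert.h>
#include <string.h>

static void rep_movsb(void *dst, const void *src, unsigned long count)
{
	/* Same primitive the kernel routines rely on: REP MOVSB. */
	asm volatile("rep movsb"
		     : "+D" (dst), "+S" (src), "+c" (count)
		     : : "memory");
}

int main(void)
{
	char src[8] = "abcdefg";
	char dst[8] = "XXXXXXX";

	rep_movsb(dst, src, 0);			/* zero count: dst is untouched */
	assert(memcmp(dst, "XXXXXXX", 8) == 0);

	rep_movsb(dst, src, sizeof(src));	/* normal copy still works */
	assert(memcmp(dst, src, sizeof(src)) == 0);
	return 0;
}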
+2 −6
@@ -236,8 +236,6 @@ ENDPROC(copy_user_generic_unrolled)
 ENTRY(copy_user_generic_string)
 	CFI_STARTPROC
 	ASM_STAC
-	andl %edx,%edx
-	jz 4f
 	cmpl $8,%edx
 	jb 2f		/* less than 8 bytes, go to byte copy loop */
 	ALIGN_DESTINATION
@@ -249,7 +247,7 @@ ENTRY(copy_user_generic_string)
 2:	movl %edx,%ecx
 3:	rep
 	movsb
-4:	xorl %eax,%eax
+	xorl %eax,%eax
 	ASM_CLAC
 	ret
 
@@ -279,12 +277,10 @@ ENDPROC(copy_user_generic_string)
 ENTRY(copy_user_enhanced_fast_string)
 	CFI_STARTPROC
 	ASM_STAC
-	andl %edx,%edx
-	jz 2f
 	movl %edx,%ecx
1:	rep
 	movsb
-2:	xorl %eax,%eax
+	xorl %eax,%eax
 	ASM_CLAC
 	ret