Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 49db46a6 authored by Denys Vlasenko, committed by Ingo Molnar
Browse files

x86/asm: Introduce push/pop macros which generate CFI_REL_OFFSET and CFI_RESTORE



Sequences:

        pushl_cfi %reg
        CFI_REL_OFFSET reg, 0

and:

        popl_cfi %reg
        CFI_RESTORE reg

happen quite often. This patch adds macros which generate them.

No assembly changes (verified with objdump -dr vmlinux.o).

Signed-off-by: Denys Vlasenko <dvlasenk@redhat.com>
Signed-off-by: Andy Lutomirski <luto@amacapital.net>
Cc: Alexei Starovoitov <ast@plumgrid.com>
Cc: Borislav Petkov <bp@alien8.de>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Cc: Kees Cook <keescook@chromium.org>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Oleg Nesterov <oleg@redhat.com>
Cc: Will Drewry <wad@chromium.org>
Link: http://lkml.kernel.org/r/1421017655-25561-1-git-send-email-dvlasenk@redhat.com
Link: http://lkml.kernel.org/r/2202eb90f175cf45d1b2d1c64dbb5676a8ad07ad.1424989793.git.luto@amacapital.net


Signed-off-by: Ingo Molnar <mingo@kernel.org>
parent 69e8544c
Loading
Loading
Loading
Loading
+14 −28
Original line number Diff line number Diff line
@@ -210,37 +210,23 @@ For 32-bit we have the following conventions - kernel is built with
 */

	.macro SAVE_ALL
	pushl_cfi %eax
	CFI_REL_OFFSET eax, 0
	pushl_cfi %ebp
	CFI_REL_OFFSET ebp, 0
	pushl_cfi %edi
	CFI_REL_OFFSET edi, 0
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi %edx
	CFI_REL_OFFSET edx, 0
	pushl_cfi %ecx
	CFI_REL_OFFSET ecx, 0
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi_reg eax
	pushl_cfi_reg ebp
	pushl_cfi_reg edi
	pushl_cfi_reg esi
	pushl_cfi_reg edx
	pushl_cfi_reg ecx
	pushl_cfi_reg ebx
	.endm

	.macro RESTORE_ALL
	popl_cfi %ebx
	CFI_RESTORE ebx
	popl_cfi %ecx
	CFI_RESTORE ecx
	popl_cfi %edx
	CFI_RESTORE edx
	popl_cfi %esi
	CFI_RESTORE esi
	popl_cfi %edi
	CFI_RESTORE edi
	popl_cfi %ebp
	CFI_RESTORE ebp
	popl_cfi %eax
	CFI_RESTORE eax
	popl_cfi_reg ebx
	popl_cfi_reg ecx
	popl_cfi_reg edx
	popl_cfi_reg esi
	popl_cfi_reg edi
	popl_cfi_reg ebp
	popl_cfi_reg eax
	.endm

#endif /* CONFIG_X86_64 */
+24 −0
Original line number Diff line number Diff line
@@ -86,11 +86,23 @@
	CFI_ADJUST_CFA_OFFSET 8
	.endm

	.macro pushq_cfi_reg reg
	pushq %\reg
	CFI_ADJUST_CFA_OFFSET 8
	CFI_REL_OFFSET \reg, 0
	.endm

	.macro popq_cfi reg
	popq \reg
	CFI_ADJUST_CFA_OFFSET -8
	.endm

	.macro popq_cfi_reg reg
	popq %\reg
	CFI_ADJUST_CFA_OFFSET -8
	CFI_RESTORE \reg
	.endm

	.macro pushfq_cfi
	pushfq
	CFI_ADJUST_CFA_OFFSET 8
@@ -116,11 +128,23 @@
	CFI_ADJUST_CFA_OFFSET 4
	.endm

	.macro pushl_cfi_reg reg
	pushl %\reg
	CFI_ADJUST_CFA_OFFSET 4
	CFI_REL_OFFSET \reg, 0
	.endm

	.macro popl_cfi reg
	popl \reg
	CFI_ADJUST_CFA_OFFSET -4
	.endm

	.macro popl_cfi_reg reg
	popl %\reg
	CFI_ADJUST_CFA_OFFSET -4
	CFI_RESTORE \reg
	.endm

	.macro pushfl_cfi
	pushfl
	CFI_ADJUST_CFA_OFFSET 4
+7 −14
Original line number Diff line number Diff line
@@ -1234,20 +1234,13 @@ error_code:
	/*CFI_REL_OFFSET es, 0*/
	pushl_cfi %ds
	/*CFI_REL_OFFSET ds, 0*/
	pushl_cfi %eax
	CFI_REL_OFFSET eax, 0
	pushl_cfi %ebp
	CFI_REL_OFFSET ebp, 0
	pushl_cfi %edi
	CFI_REL_OFFSET edi, 0
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi %edx
	CFI_REL_OFFSET edx, 0
	pushl_cfi %ecx
	CFI_REL_OFFSET ecx, 0
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi_reg eax
	pushl_cfi_reg ebp
	pushl_cfi_reg edi
	pushl_cfi_reg esi
	pushl_cfi_reg edx
	pushl_cfi_reg ecx
	pushl_cfi_reg ebx
	cld
	movl $(__KERNEL_PERCPU), %ecx
	movl %ecx, %fs
+20 −30
Original line number Diff line number Diff line
@@ -13,16 +13,6 @@
#include <asm/alternative-asm.h>
#include <asm/dwarf2.h>

.macro SAVE reg
	pushl_cfi %\reg
	CFI_REL_OFFSET \reg, 0
.endm

.macro RESTORE reg
	popl_cfi %\reg
	CFI_RESTORE \reg
.endm

.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
@@ -67,10 +57,10 @@ ENDPROC(atomic64_xchg_cx8)
.macro addsub_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	SAVE esi
	SAVE edi
	pushl_cfi_reg ebp
	pushl_cfi_reg ebx
	pushl_cfi_reg esi
	pushl_cfi_reg edi

	movl %eax, %esi
	movl %edx, %edi
@@ -89,10 +79,10 @@ ENTRY(atomic64_\func\()_return_cx8)
10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE edi
	RESTORE esi
	RESTORE ebx
	RESTORE ebp
	popl_cfi_reg edi
	popl_cfi_reg esi
	popl_cfi_reg ebx
	popl_cfi_reg ebp
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
@@ -104,7 +94,7 @@ addsub_return sub sub sbb
.macro incdec_return func ins insc
ENTRY(atomic64_\func\()_return_cx8)
	CFI_STARTPROC
	SAVE ebx
	pushl_cfi_reg ebx

	read64 %esi
1:
@@ -119,7 +109,7 @@ ENTRY(atomic64_\func\()_return_cx8)
10:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	popl_cfi_reg ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_\func\()_return_cx8)
@@ -130,7 +120,7 @@ incdec_return dec sub sbb

ENTRY(atomic64_dec_if_positive_cx8)
	CFI_STARTPROC
	SAVE ebx
	pushl_cfi_reg ebx

	read64 %esi
1:
@@ -146,18 +136,18 @@ ENTRY(atomic64_dec_if_positive_cx8)
2:
	movl %ebx, %eax
	movl %ecx, %edx
	RESTORE ebx
	popl_cfi_reg ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_dec_if_positive_cx8)

ENTRY(atomic64_add_unless_cx8)
	CFI_STARTPROC
	SAVE ebp
	SAVE ebx
	pushl_cfi_reg ebp
	pushl_cfi_reg ebx
/* these just push these two parameters on the stack */
	SAVE edi
	SAVE ecx
	pushl_cfi_reg edi
	pushl_cfi_reg ecx

	movl %eax, %ebp
	movl %edx, %edi
@@ -179,8 +169,8 @@ ENTRY(atomic64_add_unless_cx8)
3:
	addl $8, %esp
	CFI_ADJUST_CFA_OFFSET -8
	RESTORE ebx
	RESTORE ebp
	popl_cfi_reg ebx
	popl_cfi_reg ebp
	ret
4:
	cmpl %edx, 4(%esp)
@@ -192,7 +182,7 @@ ENDPROC(atomic64_add_unless_cx8)

ENTRY(atomic64_inc_not_zero_cx8)
	CFI_STARTPROC
	SAVE ebx
	pushl_cfi_reg ebx

	read64 %esi
1:
@@ -209,7 +199,7 @@ ENTRY(atomic64_inc_not_zero_cx8)

	movl $1, %eax
3:
	RESTORE ebx
	popl_cfi_reg ebx
	ret
	CFI_ENDPROC
ENDPROC(atomic64_inc_not_zero_cx8)
+20 −40
Original line number Diff line number Diff line
@@ -51,10 +51,8 @@ unsigned int csum_partial(const unsigned char * buff, int len, unsigned int sum)
	   */		
ENTRY(csum_partial)
	CFI_STARTPROC
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi_reg esi
	pushl_cfi_reg ebx
	movl 20(%esp),%eax	# Function arg: unsigned int sum
	movl 16(%esp),%ecx	# Function arg: int len
	movl 12(%esp),%esi	# Function arg: unsigned char *buff
@@ -131,10 +129,8 @@ ENTRY(csum_partial)
	jz 8f
	roll $8, %eax
8:
	popl_cfi %ebx
	CFI_RESTORE ebx
	popl_cfi %esi
	CFI_RESTORE esi
	popl_cfi_reg ebx
	popl_cfi_reg esi
	ret
	CFI_ENDPROC
ENDPROC(csum_partial)
@@ -145,10 +141,8 @@ ENDPROC(csum_partial)

ENTRY(csum_partial)
	CFI_STARTPROC
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi_reg esi
	pushl_cfi_reg ebx
	movl 20(%esp),%eax	# Function arg: unsigned int sum
	movl 16(%esp),%ecx	# Function arg: int len
	movl 12(%esp),%esi	# Function arg:	const unsigned char *buf
@@ -255,10 +249,8 @@ ENTRY(csum_partial)
	jz 90f
	roll $8, %eax
90: 
	popl_cfi %ebx
	CFI_RESTORE ebx
	popl_cfi %esi
	CFI_RESTORE esi
	popl_cfi_reg ebx
	popl_cfi_reg esi
	ret
	CFI_ENDPROC
ENDPROC(csum_partial)
@@ -298,12 +290,9 @@ ENTRY(csum_partial_copy_generic)
	CFI_STARTPROC
	subl  $4,%esp	
	CFI_ADJUST_CFA_OFFSET 4
	pushl_cfi %edi
	CFI_REL_OFFSET edi, 0
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi_reg edi
	pushl_cfi_reg esi
	pushl_cfi_reg ebx
	movl ARGBASE+16(%esp),%eax	# sum
	movl ARGBASE+12(%esp),%ecx	# len
	movl ARGBASE+4(%esp),%esi	# src
@@ -412,12 +401,9 @@ DST( movb %cl, (%edi) )

.previous

	popl_cfi %ebx
	CFI_RESTORE ebx
	popl_cfi %esi
	CFI_RESTORE esi
	popl_cfi %edi
	CFI_RESTORE edi
	popl_cfi_reg ebx
	popl_cfi_reg esi
	popl_cfi_reg edi
	popl_cfi %ecx			# equivalent to addl $4,%esp
	ret	
	CFI_ENDPROC
@@ -441,12 +427,9 @@ ENDPROC(csum_partial_copy_generic)
		
ENTRY(csum_partial_copy_generic)
	CFI_STARTPROC
	pushl_cfi %ebx
	CFI_REL_OFFSET ebx, 0
	pushl_cfi %edi
	CFI_REL_OFFSET edi, 0
	pushl_cfi %esi
	CFI_REL_OFFSET esi, 0
	pushl_cfi_reg ebx
	pushl_cfi_reg edi
	pushl_cfi_reg esi
	movl ARGBASE+4(%esp),%esi	#src
	movl ARGBASE+8(%esp),%edi	#dst	
	movl ARGBASE+12(%esp),%ecx	#len
@@ -506,12 +489,9 @@ DST( movb %dl, (%edi) )
	jmp  7b			
.previous				

	popl_cfi %esi
	CFI_RESTORE esi
	popl_cfi %edi
	CFI_RESTORE edi
	popl_cfi %ebx
	CFI_RESTORE ebx
	popl_cfi_reg esi
	popl_cfi_reg edi
	popl_cfi_reg ebx
	ret
	CFI_ENDPROC
ENDPROC(csum_partial_copy_generic)
Loading