
Commit 79c5dca3 authored by H. Peter Anvin

x86, msr: CFI annotations, cleanups for msr-reg.S



Add CFI annotations for native_{rd,wr}msr_safe_regs().
Simplify the 64-bit implementation: we don't allow the upper half
registers to be set, and so we can use them to carry state across the
operation.

Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Cc: Borislav Petkov <petkovbb@gmail.com>
LKML-Reference: <1251705011-18636-1-git-send-email-petkovbb@gmail.com>
parent 709972b1
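
Editor's note: the CFI annotations added below rely on helper macros from <asm/dwarf2.h>, which the patch newly includes. As a rough sketch (reconstructed from memory of that era's asm/dwarf2.h, so the exact definitions may differ), each push/pop helper simply pairs the stack operation with the matching DWARF CFA-offset adjustment, so an unwinder can still walk the stack at any point inside the function:

/* Sketch, not verbatim: the <asm/dwarf2.h> helpers this patch starts using.
 * Each one keeps the DWARF call-frame info in sync with the stack pointer. */
	.macro pushq_cfi reg
	pushq \reg
	CFI_ADJUST_CFA_OFFSET 8		/* 64-bit push grows the frame by 8 */
	.endm

	.macro popq_cfi reg
	popq \reg
	CFI_ADJUST_CFA_OFFSET -8	/* matching shrink on pop */
	.endm

	.macro pushl_cfi reg
	pushl \reg
	CFI_ADJUST_CFA_OFFSET 4		/* 32-bit push grows the frame by 4 */
	.endm

	.macro popl_cfi reg
	popl \reg
	CFI_ADJUST_CFA_OFFSET -4
	.endm

The 64-bit simplification mentioned in the message follows from the interface itself: the gprs array only carries the eight 32-bit legacy registers (offsets 0 through 28 in the diff), so the upper-half registers are free scratch. After the patch, %r10 holds the gprs pointer and %r11d the return value, replacing the two stack slots the old code pushed and popped.
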
+42 −38  msr-reg.S
@@ -1,5 +1,6 @@
 #include <linux/linkage.h>
 #include <linux/errno.h>
+#include <asm/dwarf2.h>
 #include <asm/asm.h>
 #include <asm/msr.h>
 
@@ -12,10 +13,11 @@
  */
 .macro op_safe_regs op:req
 ENTRY(native_\op\()_safe_regs)
-	push    %rbx
-	push    %rbp
-	push    $0              /* Return value */
-	push    %rdi
+	CFI_STARTPROC
+	pushq_cfi %rbx
+	pushq_cfi %rbp
+	movq	%rdi, %r10	/* Save pointer */
+	xorl	%r11d, %r11d	/* Return value */
 	movl    (%rdi), %eax
 	movl    4(%rdi), %ecx
 	movl    8(%rdi), %edx
@@ -23,27 +25,26 @@ ENTRY(native_\op\()_safe_regs)
 	movl    20(%rdi), %ebp
 	movl    24(%rdi), %esi
 	movl    28(%rdi), %edi
+	CFI_REMEMBER_STATE
 1:	\op
-2:	movl    %edi, %r10d
-	pop     %rdi
-	movl    %eax, (%rdi)
-	movl    %ecx, 4(%rdi)
-	movl    %edx, 8(%rdi)
-	movl    %ebx, 12(%rdi)
-	movl    %ebp, 20(%rdi)
-	movl    %esi, 24(%rdi)
-	movl    %r10d, 28(%rdi)
-	pop     %rax
-	pop     %rbp
-	pop     %rbx
+2:	movl    %eax, (%r10)
+	movl	%r11d, %eax	/* Return value */
+	movl    %ecx, 4(%r10)
+	movl    %edx, 8(%r10)
+	movl    %ebx, 12(%r10)
+	movl    %ebp, 20(%r10)
+	movl    %esi, 24(%r10)
+	movl    %edi, 28(%r10)
+	popq_cfi %rbp
+	popq_cfi %rbx
 	ret
 3:
-	movq    $-EIO, 8(%rsp)
+	CFI_RESTORE_STATE
+	movl    $-EIO, %r11d
 	jmp     2b
-	.section __ex_table,"ax"
-	.balign 4
-	.quad   1b, 3b
-	.previous
+
+	_ASM_EXTABLE(1b, 3b)
+	CFI_ENDPROC
 ENDPROC(native_\op\()_safe_regs)
 .endm
 
@@ -51,12 +52,13 @@ ENDPROC(native_\op\()_safe_regs)
 
 .macro op_safe_regs op:req
 ENTRY(native_\op\()_safe_regs)
-	push    %ebx
-	push    %ebp
-	push    %esi
-	push    %edi
-	push    $0              /* Return value */
-	push    %eax
+	CFI_STARTPROC
+	pushl_cfi %ebx
+	pushl_cfi %ebp
+	pushl_cfi %esi
+	pushl_cfi %edi
+	pushl_cfi $0              /* Return value */
+	pushl_cfi %eax
 	movl    4(%eax), %ecx
 	movl    8(%eax), %edx
 	movl    12(%eax), %ebx
@@ -64,30 +66,32 @@ ENTRY(native_\op\()_safe_regs)
 	movl    24(%eax), %esi
 	movl    28(%eax), %edi
 	movl    (%eax), %eax
+	CFI_REMEMBER_STATE
 1:	\op
-2:	push    %eax
+2:	pushl_cfi %eax
 	movl    4(%esp), %eax
-	pop     (%eax)
+	popl_cfi (%eax)
 	addl    $4, %esp
+	CFI_ADJUST_CFA_OFFSET -4
 	movl    %ecx, 4(%eax)
 	movl    %edx, 8(%eax)
 	movl    %ebx, 12(%eax)
 	movl    %ebp, 20(%eax)
 	movl    %esi, 24(%eax)
 	movl    %edi, 28(%eax)
-	pop     %eax
-	pop     %edi
-	pop     %esi
-	pop     %ebp
-	pop     %ebx
+	popl_cfi %eax
+	popl_cfi %edi
+	popl_cfi %esi
+	popl_cfi %ebp
+	popl_cfi %ebx
 	ret
 3:
+	CFI_RESTORE_STATE
 	movl    $-EIO, 4(%esp)
 	jmp     2b
-	.section __ex_table,"ax"
-	.balign 4
-	.long   1b, 3b
-	.previous
+
+	_ASM_EXTABLE(1b, 3b)
+	CFI_ENDPROC
 ENDPROC(native_\op\()_safe_regs)
 .endm
 
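
Editor's note: the other cleanup visible in both hunks is that the open-coded exception-table fragments (.section __ex_table,"ax" / .balign 4 / .quad or .long 1b, 3b / .previous) are replaced by the _ASM_EXTABLE(1b, 3b) helper from <asm/asm.h>. Roughly, and only as an illustrative expansion from memory rather than the verbatim macro, it records the same "faulting address, fixup address" pair with the pointer size and alignment appropriate for the build:

/* Approximate assembly-side expansion of _ASM_EXTABLE(1b, 3b); the real
 * macro picks the directives via _ASM_ALIGN and _ASM_PTR per word size. */
	.section __ex_table, "a"	/* allocatable exception-table section */
	.balign 8			/* 4 on 32-bit, 8 on 64-bit */
	.quad 1b, 3b			/* .long on 32-bit: fault IP, fixup IP */
	.previous			/* return to the previous (text) section */

So when the rdmsr/wrmsr at label 1 faults (for example on a non-existent MSR), the fixup at label 3 runs; after this patch it only has to load -EIO into the return value (%r11d on 64-bit, the saved stack slot on 32-bit) before jumping back to the common exit path at label 2.
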