Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit df4c0b18 authored by Linus Torvalds
Browse files

Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 asm updates from Ingo Molnar:

 - Add UMIP emulation/spoofing for 64-bit processes as well, because of
   Wine-based gaming.

 - Clean up symbols/labels in low level asm code

 - Add an assembly-optimized mul_u64_u32_div() implementation on x86-64.

* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/umip: Add emulation (spoofing) for UMIP covered instructions in 64-bit processes as well
  x86/asm: Make some functions local labels
  x86/asm/suspend: Get rid of bogus_64_magic
  x86/math64: Provide a sane mul_u64_u32_div() implementation for x86_64
parents 7e67a859 e86c2c8b
Loading
Loading
Loading
Loading
+2 −2
Original line number Diff line number Diff line
@@ -140,7 +140,7 @@ ENTRY(startup_32)
/*
 * Jump to the relocated address.
 */
	leal	relocated(%ebx), %eax
	leal	.Lrelocated(%ebx), %eax
	jmp	*%eax
ENDPROC(startup_32)

@@ -209,7 +209,7 @@ ENDPROC(efi32_stub_entry)
#endif

	.text
relocated:
.Lrelocated:

/*
 * Clear BSS (stack is currently empty)
+9 −9
Original line number Diff line number Diff line
@@ -87,7 +87,7 @@ ENTRY(startup_32)

	call	verify_cpu
	testl	%eax, %eax
	jnz	no_longmode
	jnz	.Lno_longmode

/*
 * Compute the delta between where we were compiled to run at
@@ -322,7 +322,7 @@ ENTRY(startup_64)
1:	popq	%rdi
	subq	$1b, %rdi

	call	adjust_got
	call	.Ladjust_got

	/*
	 * At this point we are in long mode with 4-level paging enabled,
@@ -421,7 +421,7 @@ trampoline_return:

	/* The new adjustment is the relocation address */
	movq	%rbx, %rdi
	call	adjust_got
	call	.Ladjust_got

/*
 * Copy the compressed kernel to the end of our buffer
@@ -440,7 +440,7 @@ trampoline_return:
/*
 * Jump to the relocated address.
 */
	leaq	relocated(%rbx), %rax
	leaq	.Lrelocated(%rbx), %rax
	jmp	*%rax

#ifdef CONFIG_EFI_STUB
@@ -511,7 +511,7 @@ ENDPROC(efi64_stub_entry)
#endif

	.text
relocated:
.Lrelocated:

/*
 * Clear BSS (stack is currently empty)
@@ -548,7 +548,7 @@ relocated:
 * first time we touch GOT).
 * RDI is the new adjustment to apply.
 */
adjust_got:
.Ladjust_got:
	/* Walk through the GOT adding the address to the entries */
	leaq	_got(%rip), %rdx
	leaq	_egot(%rip), %rcx
@@ -622,7 +622,7 @@ ENTRY(trampoline_32bit_src)
	movl	%eax, %cr4

	/* Calculate address of paging_enabled() once we are executing in the trampoline */
	leal	paging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax
	leal	.Lpaging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax

	/* Prepare the stack for far return to Long Mode */
	pushl	$__KERNEL_CS
@@ -635,7 +635,7 @@ ENTRY(trampoline_32bit_src)
	lret

	.code64
paging_enabled:
.Lpaging_enabled:
	/* Return from the trampoline */
	jmp	*%rdi

@@ -647,7 +647,7 @@ paging_enabled:
	.org	trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE

	.code32
no_longmode:
.Lno_longmode:
	/* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */
1:
	hlt
+2 −2
Original line number Diff line number Diff line
@@ -1058,10 +1058,10 @@ ENTRY(native_load_gs_index)
ENDPROC(native_load_gs_index)
EXPORT_SYMBOL(native_load_gs_index)

	_ASM_EXTABLE(.Lgs_change, bad_gs)
	_ASM_EXTABLE(.Lgs_change, .Lbad_gs)
	.section .fixup, "ax"
	/* running with kernelgs */
bad_gs:
.Lbad_gs:
	SWAPGS					/* switch back to user gs */
.macro ZAP_GS
	/* This can't be a string because the preprocessor needs to see it. */
+13 −0
Original line number Diff line number Diff line
@@ -73,6 +73,19 @@ static inline u64 mul_u32_u32(u32 a, u32 b)

#else
# include <asm-generic/div64.h>

static inline u64 mul_u64_u32_div(u64 a, u32 mul, u32 div)
{
	u64 q;

	asm ("mulq %2; divq %3" : "=a" (q)
				: "a" (a), "rm" ((u64)mul), "rm" ((u64)div)
				: "rdx");

	return q;
}
#define mul_u64_u32_div	mul_u64_u32_div

#endif /* CONFIG_X86_32 */

#endif /* _ASM_X86_DIV64_H */
+6 −4
Original line number Diff line number Diff line
@@ -18,8 +18,13 @@ ENTRY(wakeup_long64)
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	jne	bogus_64_magic
	je	2f

	/* stop here on a saved_magic mismatch */
	movq $0xbad6d61676963, %rcx
1:
	jmp 1b
2:
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss	
	movw	%ax, %ds
@@ -37,9 +42,6 @@ ENTRY(wakeup_long64)
	jmp	*%rax
ENDPROC(wakeup_long64)

bogus_64_magic:
	jmp	bogus_64_magic

ENTRY(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp
Loading