
Commit f24d6f26 authored by Linus Torvalds

Merge branch 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull x86 asm updates from Thomas Gleixner:
 "The lowlevel and ASM code updates for x86:

   - Make stack trace unwinding more reliable

   - ASM instruction updates for better code generation

   - Various cleanups"

* 'x86-asm-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/entry/64: Add two more instruction suffixes
  x86/asm/64: Use 32-bit XOR to zero registers
  x86/build/vdso: Simplify 'cmd_vdso2c'
  x86/build/vdso: Remove unused vdso-syms.lds
  x86/stacktrace: Enable HAVE_RELIABLE_STACKTRACE for the ORC unwinder
  x86/unwind/orc: Detect the end of the stack
  x86/stacktrace: Do not fail for ORC with regs on stack
  x86/stacktrace: Clarify the reliable success paths
  x86/stacktrace: Remove STACKTRACE_DUMP_ONCE
  x86/stacktrace: Do not unwind after user regs
  x86/asm: Use CC_SET/CC_OUT in percpu_cmpxchg8b_double() to micro-optimize code generation
parents b9b8e5b7 6709812f
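
Among the entries above, "x86/entry/64: Add two more instruction suffixes" continues the effort of spelling out operand sizes where an instruction has only a memory operand, so the width cannot be inferred from a register. The lines below are a minimal illustration of that idea, not the actual code touched by the commit; the instruction and operand are invented for the example:

	# Only a memory operand: the operand width is ambiguous, and GNU as
	# warns about having to pick a default size.
	or	$1, (%rdi)

	# Explicit 'q' suffix: the operation is unambiguously 64 bits wide.
	orq	$1, (%rdi)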
+1 −1
@@ -180,7 +180,7 @@ config X86
	select HAVE_PERF_USER_STACK_DUMP
	select HAVE_RCU_TABLE_FREE
	select HAVE_REGS_AND_STACK_ACCESS_API
-	select HAVE_RELIABLE_STACKTRACE		if X86_64 && UNWINDER_FRAME_POINTER && STACK_VALIDATION
+	select HAVE_RELIABLE_STACKTRACE		if X86_64 && (UNWINDER_FRAME_POINTER || UNWINDER_ORC) && STACK_VALIDATION
	select HAVE_STACKPROTECTOR		if CC_HAS_SANE_STACKPROTECTOR
	select HAVE_STACK_VALIDATION		if X86_64
	select HAVE_RSEQ
+1 −1
@@ -75,7 +75,7 @@
 *   %r9
 */
__load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
	pxor MSG, MSG

	mov LEN, %r8
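
This hunk, and the similar ones below, replaces the 64-bit form of a register-clearing XOR with the 32-bit form ("x86/asm/64: Use 32-bit XOR to zero registers" in the commit list above). The result is identical, because a write to a 32-bit register zero-extends into the full 64-bit register, while the 32-bit encoding drops the REX.W prefix and is the form reliably recognized as a dependency-breaking zeroing idiom during register renaming (some CPUs do not treat the 64-bit variant that way). A short sketch of the two encodings; the byte sequences are the standard x86-64 encodings:

	xor	%rax, %rax	# 48 31 c0 - 3 bytes, 64-bit form
	xor	%eax, %eax	# 31 c0    - 2 bytes, %rax still ends up 0
	xor	%r9, %r9	# 4d 31 c9 - 3 bytes, 64-bit form
	xor	%r9d, %r9d	# 45 31 c9 - same length here, but the recognized zeroing idiom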
+1 −1
@@ -66,7 +66,7 @@
 *   %r9
 */
__load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
	pxor MSG0, MSG0
	pxor MSG1, MSG1

+1 −1
@@ -59,7 +59,7 @@
 *   %r9
 */
__load_partial:
-	xor %r9, %r9
+	xor %r9d, %r9d
	pxor MSG, MSG

	mov LEN, %r8
+4 −4
@@ -258,7 +258,7 @@ ALL_F: .octa 0xffffffffffffffffffffffffffffffff
.macro GCM_INIT Iv SUBKEY AAD AADLEN
	mov \AADLEN, %r11
	mov %r11, AadLen(%arg2) # ctx_data.aad_length = aad_length
-	xor %r11, %r11
+	xor %r11d, %r11d
	mov %r11, InLen(%arg2) # ctx_data.in_length = 0
	mov %r11, PBlockLen(%arg2) # ctx_data.partial_block_length = 0
	mov %r11, PBlockEncKey(%arg2) # ctx_data.partial_block_enc_key = 0
@@ -286,7 +286,7 @@ ALL_F: .octa 0xffffffffffffffffffffffffffffffff
	movdqu HashKey(%arg2), %xmm13
	add %arg5, InLen(%arg2)

-	xor %r11, %r11 # initialise the data pointer offset as zero
+	xor %r11d, %r11d # initialise the data pointer offset as zero
	PARTIAL_BLOCK %arg3 %arg4 %arg5 %r11 %xmm8 \operation

	sub %r11, %arg5		# sub partial block data used
@@ -702,7 +702,7 @@ _no_extra_mask_1_\@:

	# GHASH computation for the last <16 Byte block
	GHASH_MUL \AAD_HASH, %xmm13, %xmm0, %xmm10, %xmm11, %xmm5, %xmm6
-	xor	%rax,%rax
+	xor	%eax, %eax

	mov	%rax, PBlockLen(%arg2)
	jmp	_dec_done_\@
@@ -737,7 +737,7 @@ _no_extra_mask_2_\@:

	# GHASH computation for the last <16 Byte block
	GHASH_MUL \AAD_HASH, %xmm13, %xmm0, %xmm10, %xmm11, %xmm5, %xmm6
-	xor	%rax,%rax
+	xor	%eax, %eax

	mov	%rax, PBlockLen(%arg2)
	jmp	_encode_done_\@
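
Note that in the hunks above the cleared register is still used at its full 64-bit width afterwards, for example in the 64-bit store into PBlockLen(%arg2). That stays correct with the 32-bit XOR because writing %eax (or %r11d) zeroes the whole 64-bit register; condensed from the code above:

	xor	%eax, %eax		# clears all 64 bits of %rax
	mov	%rax, PBlockLen(%arg2)	# the 64-bit store therefore still writes 0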