
Commit 84d95ad4 authored by Borislav Petkov

x86/lib/memset_64.S: Convert to ALTERNATIVE_2 macro



Make alternatives replace single JMPs instead of whole memset functions,
thus decreasing the number of instructions copied during patching time
at boot.

While at it, make it use the REP_GOOD version by default, which means
alternatives only NOP out the JMP to the other versions, as REP_GOOD is
set by default on the majority of relevant x86 processors.
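
For reference, a minimal sketch of what such an ALTERNATIVE_2 invocation
lays down, assuming the 2015-era macros in
arch/x86/include/asm/alternative-asm.h (the real macro additionally emits
NOP-padding logic, omitted here, and the numbered labels are illustrative):
the old instruction stays inline, both replacements land in
.altinstr_replacement, and one descriptor per replacement goes into
.altinstructions, which apply_alternatives() processes in order at boot:

140:	jmp memset_orig		/* oldinstr: what every CPU boots with */
141:
	.pushsection .altinstructions, "a"
	/* REP_GOOD: empty replacement, i.e. the JMP gets NOPed out */
	altinstruction_entry 140b, 143f, X86_FEATURE_REP_GOOD, 141b-140b, 144f-143f, 0
	/* ERMS: a JMP to memset_erms gets copied over the site */
	altinstruction_entry 140b, 144f, X86_FEATURE_ERMS, 141b-140b, 145f-144f, 0
	.popsection

	.pushsection .altinstr_replacement, "ax"
143:	/* empty replacement for REP_GOOD */
144:	jmp memset_erms
145:
	.popsection

Because the ERMS descriptor comes after the REP_GOOD one, a CPU with both
features ends up with the JMP to memset_erms; in every case only this
single JMP site (5 bytes for a jmp rel32) is patched, rather than copying
a whole replacement body over __memset as before.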

Signed-off-by: Borislav Petkov <bp@suse.de>
parent a930dc45
arch/x86/lib/memset_64.S +24 −37
@@ -5,6 +5,8 @@
 #include <asm/cpufeature.h>
 #include <asm/alternative-asm.h>
 
+.weak memset
+
 /*
  * ISO C memset - set a memory block to a byte value. This function uses fast
  * string to get better performance than the original function. The code is
@@ -16,8 +18,17 @@
  *
  * rax   original destination
  */
-	.section .altinstr_replacement, "ax", @progbits
-.Lmemset_c:
+ENTRY(memset)
+ENTRY(__memset)
+	/*
+	 * Some CPUs support enhanced REP MOVSB/STOSB feature. It is recommended
+	 * to use it when possible. If not available, use fast string instructions.
+	 *
+	 * Otherwise, use original memset function.
+	 */
+	ALTERNATIVE_2 "jmp memset_orig", "", X86_FEATURE_REP_GOOD, \
+		      "jmp memset_erms", X86_FEATURE_ERMS
+
 	movq %rdi,%r9
 	movq %rdx,%rcx
 	andl $7,%edx
@@ -31,8 +42,8 @@
 	rep stosb
 	movq %r9,%rax
 	ret
-.Lmemset_e:
-	.previous
+ENDPROC(memset)
+ENDPROC(__memset)
 
 /*
  * ISO C memset - set a memory block to a byte value. This function uses
@@ -45,21 +56,16 @@
  *
  * rax   original destination
  */
-	.section .altinstr_replacement, "ax", @progbits
-.Lmemset_c_e:
+ENTRY(memset_erms)
 	movq %rdi,%r9
 	movb %sil,%al
 	movq %rdx,%rcx
 	rep stosb
 	movq %r9,%rax
 	ret
-.Lmemset_e_e:
-	.previous
+ENDPROC(memset_erms)
 
-.weak memset
-
-ENTRY(memset)
-ENTRY(__memset)
+ENTRY(memset_orig)
 	CFI_STARTPROC
 	movq %rdi,%r10
 
@@ -134,23 +140,4 @@ ENTRY(__memset)
 	jmp .Lafter_bad_alignment
 .Lfinal:
 	CFI_ENDPROC
-ENDPROC(memset)
-ENDPROC(__memset)
-
-	/* Some CPUs support enhanced REP MOVSB/STOSB feature.
-	 * It is recommended to use this when possible.
-	 *
-	 * If enhanced REP MOVSB/STOSB feature is not available, use fast string
-	 * instructions.
-	 *
-	 * Otherwise, use original memset function.
-	 *
-	 * In .altinstructions section, ERMS feature is placed after REG_GOOD
-	 * feature to implement the right patch order.
-	 */
-	.section .altinstructions,"a"
-	altinstruction_entry __memset,.Lmemset_c,X86_FEATURE_REP_GOOD,\
-			     .Lfinal-__memset,.Lmemset_e-.Lmemset_c,0
-	altinstruction_entry __memset,.Lmemset_c_e,X86_FEATURE_ERMS, \
-			     .Lfinal-__memset,.Lmemset_e_e-.Lmemset_c_e,0
-	.previous
+ENDPROC(memset_orig)
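
As an illustrative sketch of the run-time result (not part of the diff;
assuming the JMPs assemble to 5-byte jmp rel32 and that the patching code
pads shorter replacements with NOPs), the first instruction of
memset/__memset after boot ends up as:

	/*
	 *   no REP_GOOD, no ERMS:	jmp memset_orig   (oldinstr kept)
	 *   REP_GOOD (the default):	5-byte NOP        (falls through into
	 *						   the fast-string code)
	 *   ERMS:			jmp memset_erms
	 */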