
Commit 26deb043 authored by Christophe Leroy, committed by Michael Ellerman

powerpc: prepare string/mem functions for KASAN



CONFIG_KASAN implements wrappers for memcpy(), memmove() and memset().
Those wrappers perform the KASAN verification and then call __memcpy(),
__memmove() and __memset() respectively, so architectures are expected
to rename their optimised functions accordingly.

For files on which KASAN is inhibited, #defines are used to let them
call the optimised versions of the functions directly, without going
through the KASAN wrappers.

See commit 393f203f ("x86_64: kasan: add interceptors for
memset/memmove/memcpy functions") for details.

Other string/mem functions do not (yet) have KASAN wrappers, so we
have to fall back to the generic versions when KASAN is active;
otherwise the KASAN checks would be skipped.

Signed-off-by: Christophe Leroy <christophe.leroy@c-s.fr>
[mpe: Fixups to keep selftests working]
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
parent d69ca6ba
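
To make the mechanism concrete, here is a minimal sketch of the wrapper
scheme the message describes (illustrative only, not the kernel's actual
code: the real wrappers live in mm/kasan/ and the check helper prototype
is the one from <linux/kasan-checks.h>):

#include <stddef.h>

/* Declared by the architecture: the renamed optimised routine. */
void *__memset(void *s, int c, size_t n);
/* KASAN check helper (prototype as in <linux/kasan-checks.h>); it
 * reports an error if any byte of [p, p + n) is poisoned. */
void kasan_check_write(const volatile void *p, unsigned int n);

/* The KASAN build owns the plain symbol: verify, then delegate. */
void *memset(void *s, int c, size_t n)
{
	kasan_check_write(s, n);
	return __memset(s, c, n);
}
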
arch/powerpc/include/asm/kasan.h (new file) +15 −0
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

#ifdef CONFIG_KASAN
#define _GLOBAL_KASAN(fn)	_GLOBAL(__##fn)
#define _GLOBAL_TOC_KASAN(fn)	_GLOBAL_TOC(__##fn)
#define EXPORT_SYMBOL_KASAN(fn)	EXPORT_SYMBOL(__##fn)
#else
#define _GLOBAL_KASAN(fn)	_GLOBAL(fn)
#define _GLOBAL_TOC_KASAN(fn)	_GLOBAL_TOC(fn)
#define EXPORT_SYMBOL_KASAN(fn)
#endif

#endif
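
Two details worth noting about these macros: with CONFIG_KASAN=y they
paste a double-underscore prefix onto the symbol name, so a routine
declared with _GLOBAL_KASAN(memset) is assembled as __memset, leaving
the plain name free for the KASAN C wrapper; and without KASAN,
EXPORT_SYMBOL_KASAN() must expand to nothing, because the assembly
files below keep their existing EXPORT_SYMBOL() lines and the symbol
would otherwise be exported twice. A tiny hypothetical userspace demo
of the token pasting:

#include <stdio.h>

/* Stand-in for the real assembler macro: just stringify the name so
 * the expansion can be printed (hypothetical demo, not kernel code). */
#define _GLOBAL(fn)		#fn
#define _GLOBAL_KASAN(fn)	_GLOBAL(__##fn)	/* CONFIG_KASAN=y variant */

int main(void)
{
	puts(_GLOBAL_KASAN(memset));	/* prints "__memset" */
	return 0;
}
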
arch/powerpc/include/asm/string.h +29 −3
@@ -4,14 +4,17 @@
 
 #ifdef __KERNEL__
 
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_STRNCPY
 #define __HAVE_ARCH_STRNCMP
+#define __HAVE_ARCH_MEMCHR
+#define __HAVE_ARCH_MEMCMP
+#define __HAVE_ARCH_MEMSET16
+#endif
+
 #define __HAVE_ARCH_MEMSET
 #define __HAVE_ARCH_MEMCPY
 #define __HAVE_ARCH_MEMMOVE
-#define __HAVE_ARCH_MEMCMP
-#define __HAVE_ARCH_MEMCHR
-#define __HAVE_ARCH_MEMSET16
 #define __HAVE_ARCH_MEMCPY_FLUSHCACHE
 
 extern char * strcpy(char *,const char *);
@@ -27,7 +30,27 @@ extern int memcmp(const void *,const void *,__kernel_size_t);
 extern void * memchr(const void *,int,__kernel_size_t);
 extern void * memcpy_flushcache(void *,const void *,__kernel_size_t);
 
+void *__memset(void *s, int c, __kernel_size_t count);
+void *__memcpy(void *to, const void *from, __kernel_size_t n);
+void *__memmove(void *to, const void *from, __kernel_size_t n);
+
+#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
+/*
+ * For files that are not instrumented (e.g. mm/slub.c) we
+ * should use not instrumented version of mem* functions.
+ */
+#define memcpy(dst, src, len) __memcpy(dst, src, len)
+#define memmove(dst, src, len) __memmove(dst, src, len)
+#define memset(s, c, n) __memset(s, c, n)
+
+#ifndef __NO_FORTIFY
+#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
+#endif
+
+#endif
+
 #ifdef CONFIG_PPC64
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_MEMSET32
 #define __HAVE_ARCH_MEMSET64
 
@@ -49,8 +72,11 @@ static inline void *memset64(uint64_t *p, uint64_t v, __kernel_size_t n)
 {
 	return __memset64(p, v, n * 8);
 }
+#endif
 #else
+#ifndef CONFIG_KASAN
 #define __HAVE_ARCH_STRLEN
+#endif
 
 extern void *memset16(uint16_t *, uint16_t, __kernel_size_t);
 #endif
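
The gate in the hunk above keys off __SANITIZE_ADDRESS__, which the
compiler defines only for translation units actually built with the
sanitizer. A file exempted from instrumentation via the kbuild knob
KASAN_SANITIZE_foo.o := n (foo.o being a hypothetical object) does not
get that define, so its mem* calls are routed straight to the
uninstrumented routines. A minimal sketch of the effect:

#include <stddef.h>

void *__memset(void *s, int c, size_t n);	/* uninstrumented routine */

/* In a CONFIG_KASAN=y build of an uninstrumented file, the compiler
 * leaves __SANITIZE_ADDRESS__ undefined, so this redirection is live
 * (mirroring the #define in the hunk above). */
#if !defined(__SANITIZE_ADDRESS__)
#define memset(s, c, n) __memset(s, c, n)
#endif

void wipe(void *buf, size_t n)
{
	memset(buf, 0, n);	/* expands to __memset(buf, 0, n) here */
}
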
arch/powerpc/kernel/prom_init_check.sh +9 −1
@@ -16,8 +16,16 @@
 # If you really need to reference something from prom_init.o add
 # it to the list below:
 
+grep "^CONFIG_KASAN=y$" .config >/dev/null
+if [ $? -eq 0 ]
+then
+	MEM_FUNCS="__memcpy __memset"
+else
+	MEM_FUNCS="memcpy memset"
+fi
+
 WHITELIST="add_reloc_offset __bss_start __bss_stop copy_and_flush
-_end enter_prom memcpy memset reloc_offset __secondary_hold
+_end enter_prom $MEM_FUNCS reloc_offset __secondary_hold
 __secondary_hold_acknowledge __secondary_hold_spinloop __start
 strcmp strcpy strlcpy strlen strncmp strstr kstrtobool logo_linux_clut224
 reloc_got2 kernstart_addr memstart_addr linux_banner _stext
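
This works together with the string.h change above: prom_init runs very
early, before the KASAN shadow memory is initialised, so prom_init.c is
built without instrumentation. In a CONFIG_KASAN=y build its memcpy()
and memset() calls are therefore redirected to __memcpy()/__memset() by
the asm/string.h #defines, and the whitelist of symbols prom_init.o may
reference has to switch accordingly.
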
arch/powerpc/lib/Makefile +8 −3
@@ -8,9 +8,14 @@ ccflags-$(CONFIG_PPC64) := $(NO_MINIMAL_TOC)
 CFLAGS_REMOVE_code-patching.o = $(CC_FLAGS_FTRACE)
 CFLAGS_REMOVE_feature-fixups.o = $(CC_FLAGS_FTRACE)
 
-obj-y += string.o alloc.o code-patching.o feature-fixups.o
+obj-y += alloc.o code-patching.o feature-fixups.o
 
-obj-$(CONFIG_PPC32)	+= div64.o copy_32.o crtsavres.o strlen_32.o
+ifndef CONFIG_KASAN
+obj-y	+=	string.o memcmp_$(BITS).o
+obj-$(CONFIG_PPC32)	+= strlen_32.o
+endif
+
+obj-$(CONFIG_PPC32)	+= div64.o copy_32.o crtsavres.o
 
 obj-$(CONFIG_FUNCTION_ERROR_INJECTION)	+= error-inject.o

@@ -34,7 +39,7 @@ obj64-$(CONFIG_KPROBES_SANITY_TEST) += test_emulate_step.o \
 					   test_emulate_step_exec_instr.o
 
 obj-y			+= checksum_$(BITS).o checksum_wrappers.o \
-			   string_$(BITS).o memcmp_$(BITS).o
+			   string_$(BITS).o
 
 obj-y			+= sstep.o ldstfp.o quad.o
 obj64-y			+= quad.o
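
With KASAN enabled, the ifndef above drops the optimised assembly
objects (string.o, memcmp_$(BITS).o and, on PPC32, strlen_32.o) from
the build, so the kernel links the generic C implementations from
lib/string.c instead. Being plain C, those are instrumented by the
compiler like any other code, which is the "fall back to the generic
versions" the commit message calls for. As a reminder of what gets
linked instead, the generic memcmp() has roughly this shape
(paraphrased from lib/string.c, details from memory):

#include <stddef.h>

/* Generic byte-at-a-time comparison; every access here is compiler
 * instrumented under KASAN, unlike the assembly version it replaces. */
int memcmp(const void *cs, const void *ct, size_t count)
{
	const unsigned char *su1 = cs, *su2 = ct;
	int res = 0;

	for (; count > 0; ++su1, ++su2, count--) {
		res = *su1 - *su2;
		if (res != 0)
			break;
	}
	return res;
}
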
arch/powerpc/lib/copy_32.S +9 −3
@@ -14,6 +14,7 @@
 #include <asm/ppc_asm.h>
 #include <asm/export.h>
 #include <asm/code-patching-asm.h>
+#include <asm/kasan.h>
 
 #define COPY_16_BYTES		\
 	lwz	r7,4(r4);	\
@@ -68,6 +69,7 @@ CACHELINE_BYTES = L1_CACHE_BYTES
 LG_CACHELINE_BYTES = L1_CACHE_SHIFT
 CACHELINE_MASK = (L1_CACHE_BYTES-1)
 
+#ifndef CONFIG_KASAN
 _GLOBAL(memset16)
 	rlwinm.	r0 ,r5, 31, 1, 31
 	addi	r6, r3, -4
@@ -81,6 +83,7 @@ _GLOBAL(memset16)
 	sth	r4, 4(r6)
 	blr
 EXPORT_SYMBOL(memset16)
+#endif
 
 /*
  * Use dcbz on the complete cache lines in the destination
@@ -91,7 +94,7 @@ EXPORT_SYMBOL(memset16)
  * We therefore skip the optimised bloc that uses dcbz. This jump is
  * replaced by a nop once cache is active. This is done in machine_init()
  */
-_GLOBAL(memset)
+_GLOBAL_KASAN(memset)
 	cmplwi	0,r5,4
 	blt	7f

@@ -151,6 +154,7 @@
 	bdnz	9b
 	blr
 EXPORT_SYMBOL(memset)
+EXPORT_SYMBOL_KASAN(memset)
 
 /*
  * This version uses dcbz on the complete cache lines in the
@@ -163,12 +167,12 @@ EXPORT_SYMBOL(memset)
  * We therefore jump to generic_memcpy which doesn't use dcbz. This jump is
  * replaced by a nop once cache is active. This is done in machine_init()
  */
-_GLOBAL(memmove)
+_GLOBAL_KASAN(memmove)
 	cmplw	0,r3,r4
 	bgt	backwards_memcpy
 	/* fall through */
 
-_GLOBAL(memcpy)
+_GLOBAL_KASAN(memcpy)
 1:	b	generic_memcpy
 	patch_site	1b, patch__memcpy_nocache

@@ -244,6 +248,8 @@ _GLOBAL(memcpy)
 65:	blr
 EXPORT_SYMBOL(memcpy)
 EXPORT_SYMBOL(memmove)
+EXPORT_SYMBOL_KASAN(memcpy)
+EXPORT_SYMBOL_KASAN(memmove)
 
 generic_memcpy:
 	srwi.	r7,r5,3