arch/arm/Kconfig +3 −1

@@ -1704,6 +1704,7 @@ config ARCH_WANT_GENERAL_HUGETLB
 config ARM_MODULE_PLTS
 	bool "Use PLTs to allow module memory to spill over into vmalloc area"
 	depends on MODULES
+	default y
 	help
 	  Allocate PLTs when loading modules so that jumps and calls whose
 	  targets are too far away for their relative offsets to be encoded
@@ -1714,7 +1715,8 @@ config ARM_MODULE_PLTS
 	  rounding up to page size, the actual memory footprint is usually
 	  the same.

-	  Say y if you are getting out of memory errors while loading modules
+	  Disabling this is usually safe for small single-platform
+	  configurations. If unsure, say y.

 source "mm/Kconfig"
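The "too far away" in the help text above refers to the reach of the ARM B/BL encoding: its 24-bit signed word offset covers only about ±32 MiB, so a module spilled into vmalloc space cannot always branch directly to kernel text. A PLT entry bridges the gap with an absolute jump. A minimal, hypothetical veneer sketch follows; the kernel's real entry layout lives in arch/arm/kernel/module-plts.c and groups entries with their literal words:

	ldr	pc, [pc, #-4]	@ pc reads as '. + 8', so this loads the word below
	.word	callee_addr	@ absolute address of the out-of-range target (hypothetical label)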
arch/arm/Makefile +1 −1

@@ -106,7 +106,7 @@
 tune-$(CONFIG_CPU_V6K)	=$(call cc-option,-mtune=arm1136j-s,-mtune=strongarm)
 tune-y := $(tune-y)

 ifeq ($(CONFIG_AEABI),y)
-CFLAGS_ABI	:=-mabi=aapcs-linux -mno-thumb-interwork -mfpu=vfp
+CFLAGS_ABI	:=-mabi=aapcs-linux -mfpu=vfp
 else
 CFLAGS_ABI	:=$(call cc-option,-mapcs-32,-mabi=apcs-gnu) $(call cc-option,-mno-thumb-interwork,)
 endif
arch/arm/boot/compressed/Makefile +1 −1

@@ -113,7 +113,7 @@
 CFLAGS_fdt_ro.o := $(nossp_flags)
 CFLAGS_fdt_rw.o := $(nossp_flags)
 CFLAGS_fdt_wip.o := $(nossp_flags)

-ccflags-y := -fpic -mno-single-pic-base -fno-builtin -I$(obj)
+ccflags-y := -fpic $(call cc-option,-mno-single-pic-base,) -fno-builtin -I$(obj)
 asflags-y := -DZIMAGE

 # Supply kernel BSS size to the decompressor via a linker symbol.
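The change above wraps the flag in cc-option, the Kbuild helper that probes $(CC) and expands to a flag only when the compiler accepts it; toolchains that reject -mno-single-pic-base (Clang, for example) then simply build without it instead of erroring out. A rough sketch of the idiom, using a hypothetical variable name:

	# expands to the flag when $(CC) accepts it, otherwise to the
	# (here empty) fallback given as the second argument
	demo-flags := $(call cc-option,-mno-single-pic-base,)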
arch/arm/include/asm/assembler.h +8 −0

@@ -447,6 +447,14 @@ THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
 	.size \name , . - \name
 	.endm

+	.macro	csdb
+#ifdef CONFIG_THUMB2_KERNEL
+	.inst.w	0xf3af8014
+#else
+	.inst	0xe320f014
+#endif
+	.endm
+
 	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
 #ifndef CONFIG_CPU_USE_DOMAINS
 	adds	\tmp, \addr, #\size - 1
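The csdb macro emits the CSDB speculation barrier by raw encoding rather than by mnemonic (0xe320f014 is the ARM hint, 0xf3af8014 its Thumb-2 counterpart), so it assembles even with binutils that predate the instruction; both encodings sit in the NOP hint space and execute as NOPs on cores that do not implement CSDB. A hypothetical assembly-side use of the same mask-and-barrier pattern that barrier.h implements in C below:

	cmp	r1, r2		@ untrusted index in r1, array size in r2
	sbc	r0, r1, r1	@ r0 = all-ones if r1 < r2 (borrow), else 0
	and	r0, r0, r1	@ clamp: index when in bounds, 0 otherwise
	csdb			@ ensure the clamp is respected under speculation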
arch/arm/include/asm/barrier.h +32 −0

@@ -17,6 +17,12 @@
 #define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
 #define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
 #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
+#ifdef CONFIG_THUMB2_KERNEL
+#define CSDB	".inst.w 0xf3af8014"
+#else
+#define CSDB	".inst	0xe320f014"
+#endif
+#define csdb() __asm__ __volatile__(CSDB : : : "memory")
 #elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
 #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
 				    : : "r" (0) : "memory")
@@ -37,6 +43,13 @@
 #define dmb(x) __asm__ __volatile__ ("" : : : "memory")
 #endif

+#ifndef CSDB
+#define CSDB
+#endif
+#ifndef csdb
+#define csdb()
+#endif
+
 #ifdef CONFIG_ARM_HEAVY_MB
 extern void (*soc_mb)(void);
 extern void arm_heavy_mb(void);
@@ -63,6 +76,25 @@ extern void arm_heavy_mb(void);
 #define __smp_rmb()	__smp_mb()
 #define __smp_wmb()	dmb(ishst)

+#ifdef CONFIG_CPU_SPECTRE
+static inline unsigned long array_index_mask_nospec(unsigned long idx,
+						    unsigned long sz)
+{
+	unsigned long mask;
+
+	asm volatile(
+		"cmp	%1, %2\n"
+	"	sbc	%0, %1, %1\n"
+	CSDB
+	: "=r" (mask)
+	: "r" (idx), "Ir" (sz)
+	: "cc");
+
+	return mask;
+}
+#define array_index_mask_nospec array_index_mask_nospec
+#endif
+
 #include <asm-generic/barrier.h>

 #endif /* !__ASSEMBLY__ */
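array_index_mask_nospec() returns ~0UL when idx < sz and 0 otherwise, with the CSDB keeping the mask's value from being consumed under speculation; callers AND an untrusted index with the mask before the dependent access, which is what the generic array_index_nospec() helper in include/linux/nospec.h builds on top of this primitive. A minimal hypothetical sketch of the pattern:

	/*
	 * Illustrative Spectre-v1 gadget: even when idx fails the bounds
	 * check architecturally, the CPU may speculatively load array[idx].
	 * The mask forces any such speculative access to array[0] instead
	 * of attacker-chosen out-of-bounds memory.
	 */
	static unsigned long load_clamped(const unsigned long *array,
					  unsigned long idx, unsigned long size)
	{
		if (idx >= size)
			return 0;

		/* mask is ~0UL when idx < size, 0 otherwise */
		return array[idx & array_index_mask_nospec(idx, size)];
	}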