arch/arm/Kconfig (+3 −1)

@@ -1704,6 +1704,7 @@ config ARCH_WANT_GENERAL_HUGETLB
 config ARM_MODULE_PLTS
 	bool "Use PLTs to allow module memory to spill over into vmalloc area"
 	depends on MODULES
+	default y
 	help
 	  Allocate PLTs when loading modules so that jumps and calls whose
 	  targets are too far away for their relative offsets to be encoded
@@ -1714,7 +1715,8 @@ config ARM_MODULE_PLTS
 	  rounding up to page size, the actual memory footprint is usually
 	  the same.

-	  Say y if you are getting out of memory errors while loading modules
+	  Disabling this is usually safe for small single-platform
+	  configurations. If unsure, say y.

 source "mm/Kconfig"
arch/arm/Makefile (+1 −1)

@@ -106,7 +106,7 @@ tune-$(CONFIG_CPU_V6K)	=$(call cc-option,-mtune=arm1136j-s,-mtune=strongarm)
 tune-y := $(tune-y)

 ifeq ($(CONFIG_AEABI),y)
-CFLAGS_ABI	:=-mabi=aapcs-linux -mno-thumb-interwork -mfpu=vfp
+CFLAGS_ABI	:=-mabi=aapcs-linux -mfpu=vfp
 else
 CFLAGS_ABI	:=$(call cc-option,-mapcs-32,-mabi=apcs-gnu) $(call cc-option,-mno-thumb-interwork,)
 endif
arch/arm/boot/compressed/Makefile (+1 −1)

@@ -113,7 +113,7 @@ CFLAGS_fdt_ro.o := $(nossp_flags)
 CFLAGS_fdt_rw.o := $(nossp_flags)
 CFLAGS_fdt_wip.o := $(nossp_flags)

-ccflags-y := -fpic -mno-single-pic-base -fno-builtin -I$(obj)
+ccflags-y := -fpic $(call cc-option,-mno-single-pic-base,) -fno-builtin -I$(obj)
 asflags-y := -DZIMAGE

 # Supply kernel BSS size to the decompressor via a linker symbol.
arch/arm/include/asm/assembler.h (+8 −0)

@@ -447,6 +447,14 @@ THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
 	.size \name , . - \name
 	.endm

+	.macro	csdb
+#ifdef CONFIG_THUMB2_KERNEL
+	.inst.w	0xf3af8014
+#else
+	.inst	0xe320f014
+#endif
+	.endm
+
 	.macro	check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
 	adds	\tmp, \addr, #\size - 1
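A note on the raw encodings, as an aside rather than part of the patch: the macro uses .inst rather than a csdb mnemonic so that it assembles with toolchains that predate the instruction. Both values live in the architected hint space (CSDB is hint #20, offset 0x14 from the NOP encoding), so CPUs that predate CSDB execute it as a NOP. A tiny host-side sketch checking that relationship:

	#include <assert.h>
	#include <stdint.h>

	int main(void)
	{
		uint32_t arm_nop    = 0xe320f000;	/* ARM hint #0  (NOP)   */
		uint32_t arm_csdb   = 0xe320f014;	/* ARM hint #20 (CSDB)  */
		uint32_t thumb_nop  = 0xf3af8000;	/* Thumb2 wide hint #0  */
		uint32_t thumb_csdb = 0xf3af8014;	/* Thumb2 wide hint #20 */

		/* the hint number occupies the low bits of both encodings */
		assert(arm_csdb - arm_nop == 0x14);
		assert(thumb_csdb - thumb_nop == 0x14);
		return 0;
	}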
arch/arm/include/asm/barrier.h (+32 −0)

@@ -17,6 +17,12 @@
 #define isb(option) __asm__ __volatile__ ("isb " #option : : : "memory")
 #define dsb(option) __asm__ __volatile__ ("dsb " #option : : : "memory")
 #define dmb(option) __asm__ __volatile__ ("dmb " #option : : : "memory")
+#ifdef CONFIG_THUMB2_KERNEL
+#define CSDB	".inst.w 0xf3af8014"
+#else
+#define CSDB	".inst	0xe320f014"
+#endif
+#define csdb() __asm__ __volatile__(CSDB : : : "memory")
 #elif defined(CONFIG_CPU_XSC3) || __LINUX_ARM_ARCH__ == 6
 #define isb(x) __asm__ __volatile__ ("mcr p15, 0, %0, c7, c5, 4" \
 				    : : "r" (0) : "memory")
@@ -37,6 +43,13 @@
 #define dmb(x) __asm__ __volatile__ ("" : : : "memory")
 #endif

+#ifndef CSDB
+#define CSDB
+#endif
+#ifndef csdb
+#define csdb()
+#endif
+
 #ifdef CONFIG_ARM_HEAVY_MB
 extern void (*soc_mb)(void);
 extern void arm_heavy_mb(void);
@@ -63,6 +76,25 @@ extern void arm_heavy_mb(void);
 #define __smp_rmb()	__smp_mb()
 #define __smp_wmb()	dmb(ishst)

+#ifdef CONFIG_CPU_SPECTRE
+static inline unsigned long array_index_mask_nospec(unsigned long idx,
+						    unsigned long sz)
+{
+	unsigned long mask;
+
+	asm volatile(
+		"cmp	%1, %2\n"
+	"	sbc	%0, %1, %1\n"
+	CSDB
+	: "=r" (mask)
+	: "r" (idx), "Ir" (sz)
+	: "cc");
+
+	return mask;
+}
+#define array_index_mask_nospec array_index_mask_nospec
+#endif
+
 #include <asm-generic/barrier.h>

 #endif /* !__ASSEMBLY__ */
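How the mask works: cmp computes idx - sz and leaves the borrow in the carry flag (on ARM, carry is the inverse of borrow), so the following sbc of a register from itself yields all-ones when idx < sz and zero otherwise; CSDB then prevents speculation from continuing with an unclamped value. For context, a usage sketch of how this primitive is consumed via the generic array_index_nospec() helper in <linux/nospec.h>; this example is not part of the diff, and the names read_flight, flights and NR_FLIGHTS are hypothetical:

	#include <linux/nospec.h>

	#define NR_FLIGHTS 256
	static int flights[NR_FLIGHTS];

	int read_flight(unsigned long idx)
	{
		if (idx >= NR_FLIGHTS)
			return -1;
		/*
		 * The bounds check above may be bypassed under speculation.
		 * array_index_nospec() applies array_index_mask_nospec(),
		 * so a mispredicted out-of-bounds idx is clamped to 0
		 * (idx & 0) instead of indexing past the array.
		 */
		idx = array_index_nospec(idx, NR_FLIGHTS);
		return flights[idx];
	}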