arch/ia64/include/asm/percpu.h  +1 −1

--- a/arch/ia64/include/asm/percpu.h
+++ b/arch/ia64/include/asm/percpu.h
@@ -31,7 +31,7 @@ extern void *per_cpu_init(void);
 
 #endif	/* SMP */
 
-#define PER_CPU_BASE_SECTION ".data.percpu"
+#define PER_CPU_BASE_SECTION ".data..percpu"
 
 /*
  * Be extremely careful when taking the address of this variable! Due to virtual
include/asm-generic/percpu.h  +5 −5

--- a/include/asm-generic/percpu.h
+++ b/include/asm-generic/percpu.h
@@ -76,7 +76,7 @@ extern void setup_per_cpu_areas(void);
 
 #ifndef PER_CPU_BASE_SECTION
 #ifdef CONFIG_SMP
-#define PER_CPU_BASE_SECTION ".data.percpu"
+#define PER_CPU_BASE_SECTION ".data..percpu"
 #else
 #define PER_CPU_BASE_SECTION ".data"
 #endif
@@ -88,15 +88,15 @@ extern void setup_per_cpu_areas(void);
 #define PER_CPU_SHARED_ALIGNED_SECTION ""
 #define PER_CPU_ALIGNED_SECTION ""
 #else
-#define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
-#define PER_CPU_ALIGNED_SECTION ".shared_aligned"
+#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
+#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
 #endif
-#define PER_CPU_FIRST_SECTION ".first"
+#define PER_CPU_FIRST_SECTION "..first"
 
 #else
 
 #define PER_CPU_SHARED_ALIGNED_SECTION ""
-#define PER_CPU_ALIGNED_SECTION ".shared_aligned"
+#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
 #define PER_CPU_FIRST_SECTION ""
 
 #endif
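For context, these strings never appear alone: DEFINE_PER_CPU_SECTION() in include/linux/percpu-defs.h pastes PER_CPU_BASE_SECTION and a modifier suffix together inside a section attribute, which is why every piece of the name needs the new ".." infix. The extra dot keeps kernel-private sections from colliding with compiler-generated .data.* names (e.g. under -fdata-sections), which is the point of this series. A minimal sketch of the composition, assuming a simplified macro (the real one also handles CONFIG_DEBUG_FORCE_WEAK_PER_CPU and carries extra attributes):

/* Minimal sketch of how the section strings compose; the real
 * DEFINE_PER_CPU_SECTION() in percpu-defs.h adds further attributes.
 * DEFINE_PER_CPU_SKETCH is a hypothetical name, for illustration. */
#define PER_CPU_BASE_SECTION		".data..percpu"
#define PER_CPU_SHARED_ALIGNED_SECTION	"..shared_aligned"

#define DEFINE_PER_CPU_SKETCH(type, name, sec)			\
	__attribute__((section(PER_CPU_BASE_SECTION sec)))	\
	__typeof__(type) name

/* Adjacent string literals concatenate, so this object is emitted into
 * the ".data..percpu..shared_aligned" input section, which the linker
 * script below gathers into the .data..percpu output section. */
DEFINE_PER_CPU_SKETCH(int, counter, PER_CPU_SHARED_ALIGNED_SECTION);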
include/asm-generic/vmlinux.lds.h  +12 −12

--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -666,16 +666,16 @@
  */
 #define PERCPU_VADDR(vaddr, phdr)					\
 	VMLINUX_SYMBOL(__per_cpu_load) = .;				\
-	.data.percpu vaddr : AT(VMLINUX_SYMBOL(__per_cpu_load)		\
+	.data..percpu vaddr : AT(VMLINUX_SYMBOL(__per_cpu_load)	\
 				- LOAD_OFFSET) {			\
 		VMLINUX_SYMBOL(__per_cpu_start) = .;			\
-		*(.data.percpu.first)					\
-		*(.data.percpu.page_aligned)				\
-		*(.data.percpu)						\
-		*(.data.percpu.shared_aligned)				\
+		*(.data..percpu..first)					\
+		*(.data..percpu..page_aligned)				\
+		*(.data..percpu)					\
+		*(.data..percpu..shared_aligned)			\
 		VMLINUX_SYMBOL(__per_cpu_end) = .;			\
 	} phdr								\
-	. = VMLINUX_SYMBOL(__per_cpu_load) + SIZEOF(.data.percpu);
+	. = VMLINUX_SYMBOL(__per_cpu_load) + SIZEOF(.data..percpu);
 
 /**
  * PERCPU - define output section for percpu area, simple version
@@ -687,18 +687,18 @@
  *
  * This macro is equivalent to ALIGN(align); PERCPU_VADDR( , ) except
  * that __per_cpu_load is defined as a relative symbol against
- * .data.percpu which is required for relocatable x86_32
+ * .data..percpu which is required for relocatable x86_32
  * configuration.
  */
 #define PERCPU(align)							\
 	. = ALIGN(align);						\
-	.data.percpu : AT(ADDR(.data.percpu) - LOAD_OFFSET) {		\
+	.data..percpu : AT(ADDR(.data..percpu) - LOAD_OFFSET) {		\
 		VMLINUX_SYMBOL(__per_cpu_load) = .;			\
 		VMLINUX_SYMBOL(__per_cpu_start) = .;			\
-		*(.data.percpu.first)					\
-		*(.data.percpu.page_aligned)				\
-		*(.data.percpu)						\
-		*(.data.percpu.shared_aligned)				\
+		*(.data..percpu..first)					\
+		*(.data..percpu..page_aligned)				\
+		*(.data..percpu)					\
+		*(.data..percpu..shared_aligned)			\
 		VMLINUX_SYMBOL(__per_cpu_end) = .;			\
 	}
 
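Architecture linker scripts consume these macros rather than naming the section directly, so they pick up the rename automatically. An illustrative fragment of a vmlinux.lds.S, assuming the simple PERCPU() form (section placement and alignment vary per architecture; this is not taken from any one arch):

/* Illustrative vmlinux.lds.S fragment.  PERCPU() expands to the
 * .data..percpu output section defined above, collecting the
 * ..first/..page_aligned/..shared_aligned input sections. */
#include <asm-generic/vmlinux.lds.h>

SECTIONS
{
	/* ... text, rodata, ordinary data sections ... */

	PERCPU(PAGE_SIZE)

	/* ... init sections, bss ... */
}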
include/linux/percpu-defs.h  +2 −2

--- a/include/linux/percpu-defs.h
+++ b/include/linux/percpu-defs.h
@@ -127,11 +127,11 @@
  * Declaration/definition used for per-CPU variables that must be page aligned.
  */
 #define DECLARE_PER_CPU_PAGE_ALIGNED(type, name)			\
-	DECLARE_PER_CPU_SECTION(type, name, ".page_aligned")		\
+	DECLARE_PER_CPU_SECTION(type, name, "..page_aligned")		\
 	__aligned(PAGE_SIZE)
 
 #define DEFINE_PER_CPU_PAGE_ALIGNED(type, name)				\
-	DEFINE_PER_CPU_SECTION(type, name, ".page_aligned")		\
+	DEFINE_PER_CPU_SECTION(type, name, "..page_aligned")		\
 	__aligned(PAGE_SIZE)
 
 /*
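The section string is an internal detail of these macros, so C-level users are untouched by the rename. A hedged usage example, with a hypothetical variable shown only to illustrate the API:

/* Hypothetical per-CPU scratch area, for illustration only.  After
 * this patch the variable lands in .data..percpu..page_aligned, but
 * nothing changes at the C level. */
#include <linux/percpu.h>

struct scratch_page {
	char buf[PAGE_SIZE];
};

DEFINE_PER_CPU_PAGE_ALIGNED(struct scratch_page, cpu_scratch);

static char *cpu_scratch_buf(int cpu)
{
	/* per_cpu() applies the per-CPU offset for the given cpu. */
	return per_cpu(cpu_scratch, cpu).buf;
}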
kernel/module.c  +1 −1

--- a/kernel/module.c
+++ b/kernel/module.c
@@ -397,7 +397,7 @@ static unsigned int find_pcpusec(Elf_Ehdr *hdr,
 				 Elf_Shdr *sechdrs,
 				 const char *secstrings)
 {
-	return find_sec(hdr, sechdrs, secstrings, ".data.percpu");
+	return find_sec(hdr, sechdrs, secstrings, ".data..percpu");
 }
 
 static void percpu_modcopy(void *pcpudest, const void *from,
 			   unsigned long size)
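find_pcpusec() is the one place the module loader matches the per-CPU section by its literal name, which is why it must change in lockstep with the string the compiler emits. For reference, the find_sec() helper it wraps scans the ELF section headers by name, roughly as it stood in kernel/module.c at the time:

/* Find a module section by name; returns 0 if not found.  Sections
 * without SHF_ALLOC are never loaded, so they are skipped. */
static unsigned int find_sec(Elf_Ehdr *hdr,
			     Elf_Shdr *sechdrs,
			     const char *secstrings,
			     const char *name)
{
	unsigned int i;

	for (i = 1; i < hdr->e_shnum; i++)
		if ((sechdrs[i].sh_flags & SHF_ALLOC)
		    && strcmp(secstrings + sechdrs[i].sh_name, name) == 0)
			return i;
	return 0;
}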