Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 02b7da37 authored by Tim Abbott, committed by Sam Ravnborg
Browse files

Use macros for .bss.page_aligned section.



This patch changes the remaining direct references to
.bss.page_aligned in C and assembly code to use the macros in
include/linux/linkage.h.

Signed-off-by: Tim Abbott <tabbott@ksplice.com>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Ingo Molnar <mingo@redhat.com>
Cc: H. Peter Anvin <hpa@zytor.com>
Acked-by: Paul Mundt <lethal@linux-sh.org>
Cc: Chris Zankel <chris@zankel.net>
Signed-off-by: Sam Ravnborg <sam@ravnborg.org>
parent d200c922
Loading
Loading
Loading
Loading
+2 −4
Original line number Diff line number Diff line
@@ -165,11 +165,9 @@ asmlinkage int do_IRQ(unsigned int irq, struct pt_regs *regs)
}

#ifdef CONFIG_IRQSTACKS
-static char softirq_stack[NR_CPUS * THREAD_SIZE]
-		__attribute__((__section__(".bss.page_aligned")));
+static char softirq_stack[NR_CPUS * THREAD_SIZE] __page_aligned_bss;

-static char hardirq_stack[NR_CPUS * THREAD_SIZE]
-		__attribute__((__section__(".bss.page_aligned")));
+static char hardirq_stack[NR_CPUS * THREAD_SIZE] __page_aligned_bss;

/*
 * allocate per-cpu stacks for hardirq and for softirq processing
+1 −1
Original line number Diff line number Diff line
@@ -608,7 +608,7 @@ ENTRY(initial_code)
/*
 * BSS section
 */
-.section ".bss.page_aligned","wa"
+__PAGE_ALIGNED_BSS
	.align PAGE_SIZE_asm
#ifdef CONFIG_X86_PAE
swapper_pg_pmd:
+1 −1
Original line number Diff line number Diff line
@@ -418,7 +418,7 @@ ENTRY(phys_base)
ENTRY(idt_table)
	.skip IDT_ENTRIES * 16

-	.section .bss.page_aligned, "aw", @nobits
+	__PAGE_ALIGNED_BSS
	.align PAGE_SIZE
ENTRY(empty_zero_page)
	.skip PAGE_SIZE
+1 −1
Original line number Diff line number Diff line
@@ -235,7 +235,7 @@ should_never_return:
 * BSS section
 */
	
-.section ".bss.page_aligned", "w"
+__PAGE_ALIGNED_BSS
#ifdef CONFIG_MMU
ENTRY(swapper_pg_dir)
	.fill	PAGE_SIZE, 1, 0