arch/arm64/kernel/entry.S  +9 −0

@@ -139,7 +139,12 @@ tsk	.req	x28		// current thread_info
  * Interrupt handling.
  */
 	.macro	irq_handler
+#ifdef CONFIG_STRICT_MEMORY_RWX
+	ldr	x1, =handle_arch_irq
+	ldr	x1, [x1]
+#else
 	ldr	x1, handle_arch_irq
+#endif
 	mov	x0, sp
 	blr	x1
 	.endm

@@ -676,5 +681,9 @@ ENTRY(sys_rt_sigreturn_wrapper)
 	b	sys_rt_sigreturn
ENDPROC(sys_rt_sigreturn_wrapper)

+#ifdef CONFIG_STRICT_MEMORY_RWX
+	.data
+#endif
+
ENTRY(handle_arch_irq)
 	.quad	0
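Why irq_handler changes: handle_arch_irq is a function pointer written at boot, so once the kernel text is mapped read-only it has to live in .data (hence the .data directive placed in front of ENTRY(handle_arch_irq) above). The plain `ldr x1, handle_arch_irq` form is a PC-relative literal load with a reach of only ±1MB, which can no longer span the distance from .text to .data once the segments are section-aligned; the replacement instead fetches the variable's address from the literal pool (`ldr x1, =handle_arch_irq`) and dereferences it with a second load.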
arch/arm64/kernel/head.S  +4 −0

@@ -207,7 +207,11 @@ CPU_LE(	movk	x0, #0x30d0, lsl #16	)	// Clear EE and E0E on LE systems
 	msr	vttbr_el2, xzr

 	/* Hypervisor stub */
+#ifndef CONFIG_STRICT_MEMORY_RWX
 	adr	x0, __hyp_stub_vectors
+#else
+	ldr	x0, =__hyp_stub_vectors
+#endif
 	msr	vbar_el2, x0

 	/* spsr */
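Same reach problem here: `adr` computes a PC-relative address with a ±1MB range. With CONFIG_STRICT_MEMORY_RWX the linker script pads segments out to section boundaries (see vmlinux.lds.S below), so __hyp_stub_vectors can land more than 1MB away from this instruction; loading the address from the literal pool with `ldr x0, =__hyp_stub_vectors` has no such limit.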
arch/arm64/kernel/vmlinux.lds.S  +18 −0

@@ -8,6 +8,9 @@
 #include <asm/thread_info.h>
 #include <asm/memory.h>
 #include <asm/page.h>
+#ifdef CONFIG_STRICT_MEMORY_RWX
+#include <asm/pgtable.h>
+#endif

 #define ARM_EXIT_KEEP(x)
 #define ARM_EXIT_DISCARD(x)	x

@@ -52,6 +55,9 @@ SECTIONS
 		_text = .;
 		HEAD_TEXT
 	}
+#ifdef CONFIG_STRICT_MEMORY_RWX
+	. = ALIGN(1<<SECTION_SHIFT);
+#endif
 	.text : {			/* Real text segment		*/
 		_stext = .;		/* Text and read-only data	*/
 			__exception_text_start = .;

@@ -68,19 +74,31 @@ SECTIONS
 		*(.got)			/* Global offset table		*/
 	}

+#ifdef CONFIG_STRICT_MEMORY_RWX
+	. = ALIGN(1<<SECTION_SHIFT);
+#endif
 	RO_DATA(PAGE_SIZE)
 	EXCEPTION_TABLE(8)
 	NOTES
 	_etext = .;			/* End of text and rodata section */

+#ifdef CONFIG_STRICT_MEMORY_RWX
+	. = ALIGN(1<<SECTION_SHIFT);
+#else
 	. = ALIGN(PAGE_SIZE);
+#endif
 	__init_begin = .;

 	INIT_TEXT_SECTION(8)
 	.exit.text : {
 		ARM_EXIT_KEEP(EXIT_TEXT)
 	}
+#ifdef CONFIG_STRICT_MEMORY_RWX
+	. = ALIGN(1<<SECTION_SHIFT);
+	__init_data_begin = .;
+#else
 	. = ALIGN(16);
+#endif
 	.init.data : {
 		INIT_DATA
 		INIT_SETUP(16)
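The linker-script changes are the heart of the patch: the kernel is mapped with 2MB (1 << SECTION_SHIFT) block descriptors, and a block mapping carries exactly one set of permissions, so every boundary where permissions change (start of .text, start of rodata, __init_begin, and the new __init_data_begin symbol splitting init text from init data) is padded out to a section boundary. <asm/pgtable.h> is pulled in for SECTION_SHIFT. The trade-off is up to ~2MB of image padding per boundary.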
arch/arm64/mm/init.c  +10 −0

@@ -343,11 +343,21 @@ void __init mem_init(void)
 	}
 }

+#ifdef CONFIG_STRICT_MEMORY_RWX
+void free_initmem(void)
+{
+	poison_init_mem(__init_data_begin, __init_end - __init_data_begin);
+	free_reserved_area(PAGE_ALIGN((unsigned long)&__init_data_begin),
+			   ((unsigned long)&__init_end) & PAGE_MASK,
+			   0, "unused kernel");
+}
+#else
 void free_initmem(void)
 {
 	poison_init_mem(__init_begin, __init_end - __init_begin);
 	free_initmem_default(0);
 }
+#endif

 #ifdef CONFIG_BLK_DEV_INITRD
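With the section-aligned layout, only [__init_data_begin, __init_end) is poisoned and returned to the page allocator; init text is left in place, presumably because freeing it would mean splitting or re-permissioning the read-only 2MB block mappings that cover it, which this patch does not attempt. The PAGE_ALIGN()/PAGE_MASK arithmetic trims the range inward to whole pages so no partially-used page is handed back.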
arch/arm64/mm/mmu.c  +22 −1

@@ -185,6 +185,26 @@ static void __init alloc_init_pte(pmd_t *pmd, unsigned long addr,
 	} while (pte++, addr += PAGE_SIZE, addr != end);
 }

+#ifdef CONFIG_STRICT_MEMORY_RWX
+pmdval_t get_pmd_prot_sect_kernel(unsigned long addr)
+{
+	if (addr >= (unsigned long)__init_data_begin)
+		return prot_sect_kernel | PMD_SECT_PXN;
+	if (addr >= (unsigned long)__init_begin)
+		return prot_sect_kernel | PMD_SECT_RDONLY;
+	if (addr >= (unsigned long)__start_rodata)
+		return prot_sect_kernel | PMD_SECT_RDONLY | PMD_SECT_PXN;
+	if (addr >= (unsigned long)_stext)
+		return prot_sect_kernel | PMD_SECT_RDONLY;
+	return prot_sect_kernel | PMD_SECT_PXN;
+}
+#else
+pmdval_t get_pmd_prot_sect_kernel(unsigned long addr)
+{
+	return prot_sect_kernel;
+}
+#endif
+
 static void __init alloc_init_pmd(pud_t *pud, unsigned long addr,
 				  unsigned long end, phys_addr_t phys)
 {

@@ -204,7 +224,8 @@ static void __init alloc_init_pmd(pud_t *pud, unsigned long addr,
 		next = pmd_addr_end(addr, end);
 		/* try section mapping first */
 		if (((addr | next | phys) & ~SECTION_MASK) == 0)
-			set_pmd(pmd, __pmd(phys | prot_sect_kernel));
+			set_pmd(pmd, __pmd(phys |
+					get_pmd_prot_sect_kernel(addr)));
 		else
 			alloc_init_pte(pmd, addr, next, __phys_to_pfn(phys));
 		phys += next - addr;
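get_pmd_prot_sect_kernel() picks block-mapping permissions by walking the kernel layout from the highest boundary down: init data and everything above it is writable but never-executable (PXN), init text is read-only and executable, rodata is read-only and PXN, kernel text is read-only and executable, and everything below _stext (including head text) stays writable and PXN. The checks must run top-down because an address above a boundary also satisfies every lower comparison.

A minimal userspace sketch of the same decision ladder, just to illustrate the ordering; the addresses and bit values (STEXT, SECT_PXN, etc.) are made-up stand-ins, not the kernel's real symbols or PMD bits:

#include <stdio.h>

/* Stand-in "symbol" addresses, ascending as in the linker script. */
enum {
	STEXT           = 0x200000,	/* _stext            */
	START_RODATA    = 0x400000,	/* __start_rodata    */
	INIT_BEGIN      = 0x600000,	/* __init_begin      */
	INIT_DATA_BEGIN = 0x800000,	/* __init_data_begin */
};

/* Stand-in protection bits. */
#define PROT_BASE	0x1
#define SECT_RDONLY	0x2
#define SECT_PXN	0x4

static unsigned long prot_for(unsigned long addr)
{
	/* Highest boundary first: the first hit names the region. */
	if (addr >= INIT_DATA_BEGIN)
		return PROT_BASE | SECT_PXN;		   /* init data on: RW, no exec */
	if (addr >= INIT_BEGIN)
		return PROT_BASE | SECT_RDONLY;		   /* init text: RO, exec */
	if (addr >= START_RODATA)
		return PROT_BASE | SECT_RDONLY | SECT_PXN; /* rodata: RO, no exec */
	if (addr >= STEXT)
		return PROT_BASE | SECT_RDONLY;		   /* kernel text: RO, exec */
	return PROT_BASE | SECT_PXN;			   /* below _stext: RW, no exec */
}

int main(void)
{
	unsigned long samples[] = { 0x100000, 0x300000, 0x500000,
				    0x700000, 0x900000 };
	for (unsigned i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
		printf("%#lx -> prot %#lx\n", samples[i], prot_for(samples[i]));
	return 0;
}

In the real patch the ladder only matters for the 2MB section mappings; ranges that cannot be section-mapped still fall through to alloc_init_pte() with the unmodified page-level protections.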