arch/arm64/kernel/head.S (+2 −2)

@@ -664,7 +664,7 @@ ENDPROC(__secondary_switched)
  */
 	.section	".idmap.text", "ax"
 ENTRY(__enable_mmu)
-	mrs	x18, sctlr_el1			// preserve old SCTLR_EL1 value
+	mrs	x22, sctlr_el1			// preserve old SCTLR_EL1 value
 	mrs	x1, ID_AA64MMFR0_EL1
 	ubfx	x2, x1, #ID_AA64MMFR0_TGRAN_SHIFT, 4
 	cmp	x2, #ID_AA64MMFR0_TGRAN_SUPPORTED
@@ -691,7 +691,7 @@ ENTRY(__enable_mmu)
 	 * to take into account by discarding the current kernel mapping and
 	 * creating a new one.
 	 */
-	msr	sctlr_el1, x18			// disable the MMU
+	msr	sctlr_el1, x22			// disable the MMU
 	isb
 	bl	__create_page_tables		// recreate kernel mapping