/* ---- arch/arm64/include/asm/proc-fns.h (+3 −0) ---- */

#include <asm/page.h>

struct mm_struct;
struct cpu_suspend_ctx;

extern void cpu_cache_off(void);
extern void cpu_do_idle(void);
extern void cpu_do_switch_mm(unsigned long pgd_phys, struct mm_struct *mm);
extern void cpu_reset(unsigned long addr) __attribute__((noreturn));
/* Save/restore of the register context defined in <asm/suspend.h>. */
extern void cpu_do_suspend(struct cpu_suspend_ctx *ptr);
extern u64 cpu_do_resume(phys_addr_t ptr, u64 idmap_ttbr);

#include <asm/memory.h>

/* ---- arch/arm64/include/asm/suspend.h (new file, +18 −0) ---- */

#ifndef __ASM_SUSPEND_H
#define __ASM_SUSPEND_H

#define NR_CTX_REGS 11

/*
 * struct cpu_suspend_ctx must be 16-byte aligned since it is allocated on
 * the stack, which must be 16-byte aligned on v8
 */
struct cpu_suspend_ctx {
	/*
	 * This struct must be kept in sync with
	 * cpu_do_{suspend/resume} in mm/proc.S
	 */
	u64 ctx_regs[NR_CTX_REGS];
	u64 sp;
} __aligned(16);

#endif

/* ---- arch/arm64/mm/proc.S (+69 −0) ---- */

ENTRY(cpu_do_idle)
	ret
ENDPROC(cpu_do_idle)

#ifdef CONFIG_ARM64_CPU_SUSPEND
/**
 * cpu_do_suspend - save CPU registers context
 *
 * x0: virtual address of context pointer
 */
ENTRY(cpu_do_suspend)
	mrs	x2, tpidr_el0
	mrs	x3, tpidrro_el0
	mrs	x4, contextidr_el1
	mrs	x5, mair_el1
	mrs	x6, cpacr_el1
	mrs	x7, ttbr1_el1
	mrs	x8, tcr_el1
	mrs	x9, vbar_el1
	mrs	x10, mdscr_el1
	mrs	x11, oslsr_el1
	mrs	x12, sctlr_el1
	stp	x2, x3, [x0]
	stp	x4, x5, [x0, #16]
	stp	x6, x7, [x0, #32]
	stp	x8, x9, [x0, #48]
	stp	x10, x11, [x0, #64]
	str	x12, [x0, #80]
	ret
ENDPROC(cpu_do_suspend)

/**
 * cpu_do_resume - restore CPU register context
 *
 * x0: Physical address of context pointer
 * x1: ttbr0_el1 to be restored
 *
 * Returns:
 *	sctlr_el1 value in x0
 */
ENTRY(cpu_do_resume)
	/*
	 * Invalidate local tlb entries before turning on MMU
	 */
	tlbi	vmalle1
	ldp	x2, x3, [x0]
	ldp	x4, x5, [x0, #16]
	ldp	x6, x7, [x0, #32]
	ldp	x8, x9, [x0, #48]
	ldp	x10, x11, [x0, #64]
	ldr	x12, [x0, #80]
	msr	tpidr_el0, x2
	msr	tpidrro_el0, x3
	msr	contextidr_el1, x4
	msr	mair_el1, x5
	msr	cpacr_el1, x6
	msr	ttbr0_el1, x1
	msr	ttbr1_el1, x7
	msr	tcr_el1, x8
	msr	vbar_el1, x9
	msr	mdscr_el1, x10
	/*
	 * Restore oslsr_el1 by writing oslar_el1
	 */
	ubfx	x11, x11, #1, #1
	msr	oslar_el1, x11
	mov	x0, x12
	dsb	nsh		// Make sure local tlb invalidation completed
	isb
	ret
ENDPROC(cpu_do_resume)
#endif

/*
 * cpu_switch_mm(pgd_phys, tsk)
 *
arch/arm64/include/asm/proc-fns.h +3 −0 Original line number Diff line number Diff line Loading @@ -26,11 +26,14 @@ #include <asm/page.h> struct mm_struct; struct cpu_suspend_ctx; extern void cpu_cache_off(void); extern void cpu_do_idle(void); extern void cpu_do_switch_mm(unsigned long pgd_phys, struct mm_struct *mm); extern void cpu_reset(unsigned long addr) __attribute__((noreturn)); extern void cpu_do_suspend(struct cpu_suspend_ctx *ptr); extern u64 cpu_do_resume(phys_addr_t ptr, u64 idmap_ttbr); #include <asm/memory.h> Loading
arch/arm64/include/asm/suspend.h 0 → 100644 +18 −0 Original line number Diff line number Diff line #ifndef __ASM_SUSPEND_H #define __ASM_SUSPEND_H #define NR_CTX_REGS 11 /* * struct cpu_suspend_ctx must be 16-byte aligned since it is allocated on * the stack, which must be 16-byte aligned on v8 */ struct cpu_suspend_ctx { /* * This struct must be kept in sync with * cpu_do_{suspend/resume} in mm/proc.S */ u64 ctx_regs[NR_CTX_REGS]; u64 sp; } __aligned(16); #endif
/* ---- arch/arm64/mm/proc.S (+69 −0) ---- */

ENTRY(cpu_do_idle)
	ret
ENDPROC(cpu_do_idle)

#ifdef CONFIG_ARM64_CPU_SUSPEND
/**
 * cpu_do_suspend - save CPU registers context
 *
 * x0: virtual address of context pointer
 */
ENTRY(cpu_do_suspend)
	mrs	x2, tpidr_el0
	mrs	x3, tpidrro_el0
	mrs	x4, contextidr_el1
	mrs	x5, mair_el1
	mrs	x6, cpacr_el1
	mrs	x7, ttbr1_el1
	mrs	x8, tcr_el1
	mrs	x9, vbar_el1
	mrs	x10, mdscr_el1
	mrs	x11, oslsr_el1
	mrs	x12, sctlr_el1
	stp	x2, x3, [x0]
	stp	x4, x5, [x0, #16]
	stp	x6, x7, [x0, #32]
	stp	x8, x9, [x0, #48]
	stp	x10, x11, [x0, #64]
	str	x12, [x0, #80]
	ret
ENDPROC(cpu_do_suspend)

/**
 * cpu_do_resume - restore CPU register context
 *
 * x0: Physical address of context pointer
 * x1: ttbr0_el1 to be restored
 *
 * Returns:
 *	sctlr_el1 value in x0
 */
ENTRY(cpu_do_resume)
	/*
	 * Invalidate local tlb entries before turning on MMU
	 */
	tlbi	vmalle1
	ldp	x2, x3, [x0]
	ldp	x4, x5, [x0, #16]
	ldp	x6, x7, [x0, #32]
	ldp	x8, x9, [x0, #48]
	ldp	x10, x11, [x0, #64]
	ldr	x12, [x0, #80]
	msr	tpidr_el0, x2
	msr	tpidrro_el0, x3
	msr	contextidr_el1, x4
	msr	mair_el1, x5
	msr	cpacr_el1, x6
	msr	ttbr0_el1, x1
	msr	ttbr1_el1, x7
	msr	tcr_el1, x8
	msr	vbar_el1, x9
	msr	mdscr_el1, x10
	/*
	 * Restore oslsr_el1 by writing oslar_el1
	 */
	ubfx	x11, x11, #1, #1
	msr	oslar_el1, x11
	mov	x0, x12
	dsb	nsh		// Make sure local tlb invalidation completed
	isb
	ret
ENDPROC(cpu_do_resume)
#endif

/*
 * cpu_switch_mm(pgd_phys, tsk)
 *