Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 4b53cd9e authored by Venkat Devarasetty
Browse files

msm: pm: idle-v7 clean up



Legacy platforms are not expected to use the mainline
anymore. Unsupported targets are MSM7X01A, MSM7X25,
MSM7X27, MSM7X30, QSD8X50, MSM8X60, MSM8960, MSM8930,
APQ8064, FSM9XXX, MSM9615, MSM8625, MSM9625, MSM7X27A.

Remove unused code in idle-v7 and its related files.

Change-Id: If1fbf9eead91d3410c5c5e75cb92824b110b1138
Signed-off-by: Venkat Devarasetty <vdevaras@codeaurora.org>
parent 759f5a97
Loading
Loading
Loading
Loading

arch/arm/mach-msm/idle-macros.S

deleted 100644 → 0
+0 −153
Original line number Diff line number Diff line
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 and
 * only version 2 as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 */

#include <asm/hardware/cache-l2x0.h>

/*
 * DELAY: emit \rept NOPs (callers pass 300, placed right after 'wfi')
 * for Cortex targets.  Compiles to nothing on Scorpion/Krait builds,
 * which do not take this workaround path.
 */
.macro DELAY, rept
#if !defined(CONFIG_ARCH_MSM_SCORPION) && !defined(CONFIG_ARCH_MSM_KRAIT)
	.rept   \rept
	nop
	.endr
#endif
.endm

/*
 * SET_SMP_COHERENCY: switch between smp_to_amp/amp_to_smp configuration
 * by setting (on=1) or clearing (on=0) the SMP bit, bit 6 of the ACTLR
 * (CP15 c1, c0, 1).  Acts only when 'target_type' equals TARGET_IS_8625;
 * on any other target the macro falls straight through.
 * Clobbers: r0, r1, flags.
 */
.macro SET_SMP_COHERENCY, on = 0
	ldr     r0, =target_type
	ldr     r0, [r0]
	mov     r1, #TARGET_IS_8625
	cmp     r0, r1
	bne     skip\@			/* not 8625: nothing to do */
	mrc	p15, 0, r0, c1, c0, 1	/* read ACTLR register */
	.if     \on
	orr	r0, r0, #(1 << 6)	/* Set the SMP bit in ACTLR */
	.else
	bic	r0, r0, #(1 << 6)	/* Clear the SMP bit */
	.endif
	mcr	p15, 0, r0, c1, c0, 1	/* write ACTLR register */
	isb				/* ensure the coherency change takes effect */
skip\@:
.endm

/*
 * ENABLE_8x25_L2: re-enable the L2 cache controller by writing 1 to its
 * CTRL register.  Only the enable bit is touched; the other controller
 * registers are not restored here (RESUME_8x25_L2 does that).  Runs only
 * when 'target_type' is TARGET_IS_8625 AND 'apps_power_collapse' shows a
 * power collapse actually happened; otherwise falls straight through.
 * Clobbers: r0, r1, flags.
 */
.macro ENABLE_8x25_L2
	ldr     r0, =target_type
	ldr     r0, [r0]
	mov     r1, #TARGET_IS_8625
	cmp     r0, r1
	bne     skip_enable\@		/* not 8625: nothing to do */
	ldr     r0, =apps_power_collapse
	ldr     r0, [r0]
	cmp     r0, #POWER_COLLAPSED
	bne     skip_enable\@		/* did not power-collapse: L2 still on */
	ldr     r0, =l2x0_base_addr
	ldr	r0, [r0]
	mov	r1, #0x1
	str	r1, [r0, #L2X0_CTRL]	/* set the controller enable bit */
	dmb				/* make the enable visible before continuing */
skip_enable\@:
.endm

/*
 * DO_CACHE_OPERATION: run a way-based L2 maintenance operation, drain
 * the controller, then write the CTRL register.
 *   operation: offset of the background way-operation register
 *              (e.g. L2X0_CLEAN_INV_WAY or L2X0_INV_WAY)
 *   l2_enable: value written to L2X0_CTRL afterwards (ON or OFF)
 * Writes 0xffff (the 16 way bits) to the op register, polls until the
 * hardware clears every way bit, then issues a CACHE_SYNC and polls it
 * to completion before touching CTRL.
 * Clobbers: r0, r1, r2, flags.
 */
.macro DO_CACHE_OPERATION, operation, l2_enable
	ldr     r2, =l2x0_base_addr
	ldr	r2, [r2]
	ldr     r0, =0xffff
	str     r0, [r2, #\operation]	/* start the op on all ways */
wait\@:
	ldr	r0, [r2, #\operation]
	ldr	r1, =0xffff
	ands    r0, r0, r1
	bne     wait\@			/* spin until all way bits clear */
l2x_sync\@:
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync\@:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync\@			/* wait for the sync to drain */
	mov     r1, #\l2_enable
	str     r1, [r2, #L2X0_CTRL]
.endm

/*
 * SUSPEND_8x25_L2: prepare the L2 cache for power collapse.
 * 1. Check the target type (8625 only)
 * 2. Check whether we are actually entering power collapse or not
 * 3. Save the 'aux', 'data latency' and 'prefetch ctrl' registers
 *    into l2x0_saved_ctrl_reg_val (offsets 0x0/0x4/0x8)
 * 4. Clean & invalidate the whole L2 by way
 * 5. Disable the L2 cache controller (OFF written to CTRL)
 * Clobbers: r0, r1, r2, flags.
 */
.macro SUSPEND_8x25_L2
	ldr     r0, =target_type
	ldr     r0, [r0]
	mov     r1, #TARGET_IS_8625
	cmp     r0, r1
	bne     skip_suspend\@		/* not 8625: nothing to do */
	ldr	r0, =apps_power_collapse
	ldr	r0, [r0]
	cmp	r0, #POWER_COLLAPSED
	bne	skip_suspend\@		/* not collapsing: leave L2 alone */
	ldr	r0, =l2x0_saved_ctrl_reg_val
	ldr	r1, =l2x0_base_addr
	ldr	r1, [r1]
	ldr	r2, [r1, #L2X0_AUX_CTRL]
	str	r2, [r0, #0x0] /* store aux_ctlr reg value */
	ldr     r2, [r1, #L2X0_DATA_LATENCY_CTRL]
	str     r2, [r0, #0x4] /* store data latency reg value */
	ldr     r2, [r1, #L2X0_PREFETCH_CTRL]
	str     r2, [r0, #0x8] /* store prefetch_ctlr reg value */
	DO_CACHE_OPERATION L2X0_CLEAN_INV_WAY OFF
	dmb				/* complete all L2 traffic before collapse */
skip_suspend\@:
.endm

/*
 * RESUME_8x25_L2: restore the L2 cache when coming back from a
 * successful power collapse.
 * 1. Check the target type (8625 only)
 * 2. Check whether we really went through power collapse or not
 * 3. Disable the L2 cache controller before reprogramming it
 * 4. Restore the 'aux', 'data latency' and 'prefetch ctrl' registers
 *    saved by SUSPEND_8x25_L2
 * 5. Invalidate the cache by way
 * 6. Re-enable the L2 cache controller (ON written to CTRL)
 * Clobbers: r0, r1, r2, flags.
 */
.macro RESUME_8x25_L2
	ldr     r0, =target_type
	ldr     r0, [r0]
	mov     r1, #TARGET_IS_8625
	cmp     r0, r1
	bne     skip_resume\@		/* not 8625: nothing to do */
	ldr	r0, =apps_power_collapse
	ldr	r0, [r0]
	cmp	r0, #POWER_COLLAPSED
	bne	skip_resume\@		/* did not collapse: state still valid */
	ldr     r1, =l2x0_base_addr
	ldr	r1, [r1]
	mov     r0, #0x0
	str     r0, [r1, #L2X0_CTRL]	/* controller must be off while programming */
	ldr     r0, =l2x0_saved_ctrl_reg_val
	ldr     r2, [r0, #0x0]
	str	r2, [r1, #L2X0_AUX_CTRL] /* restore aux_ctlr reg value */
	ldr	r2, [r0, #0x4]
	str	r2, [r1, #L2X0_DATA_LATENCY_CTRL] /* restore data latency reg */
	ldr	r2, [r0, #0x8]
	str     r2, [r1, #L2X0_PREFETCH_CTRL] /* restore prefetch_ctlr reg */
	DO_CACHE_OPERATION L2X0_INV_WAY ON
skip_resume\@:
.endm
+0 −118
Original line number Diff line number Diff line
@@ -20,7 +20,6 @@
#include <asm/assembler.h>

#include "idle.h"
#include "idle-macros.S"

#ifdef CONFIG_MSM_SCM
#define SCM_SVC_BOOT 0x1
@@ -29,51 +28,12 @@
#endif

/*
 * msm_arch_idle: architectural idle - park the CPU in 'wfi'.
 * Clobbers: r0-r3 on the Krait path, r1 on the others.
 */
ENTRY(msm_arch_idle)
#ifdef CONFIG_ARCH_MSM_KRAIT
	mrc 	p15, 0, r0, c0, c0, 0	/* read MIDR (CPU id) */
	bic	r1, r0, #0xff		/* drop the low byte (revision field) */
	movw	r2, #0x0400
	movt	r2, #0x511F		/* r2 = 0x511F0400: first id to match */
	movw	r3, #0x0600
	movt	r3, #0x510F		/* r3 = 0x510F0600: second id to match */
	cmp	r2, r1
	cmpne	r3, r1
	bne	go_wfi			/* neither id matched: plain wfi */

	mrs	r0, cpsr		/* save interrupt-mask state... */
	cpsid	if			/* ...and mask IRQ+FIQ around the sequence */

	/*
	 * CP15 c15 registers are implementation defined; this appears to be
	 * a Krait-specific control tweak (clear bit 17) applied before wfi
	 * and restored after - confirm against Krait documentation.
	 */
	mrc	p15, 7, r1, c15, c0, 5
	bic	r2, r1, #0x20000
	mcr	p15, 7, r2, c15, c0, 5
	isb

go_wfi:
	wfi
	/* flags still carry the id-compare result from above */
	bne	wfi_done
	mcr	p15, 7, r1, c15, c0, 5	/* restore the saved c15 value */
	isb
	msr	cpsr_c, r0		/* restore interrupt masks */

wfi_done:
	bx	lr
#else
	wfi
#ifdef CONFIG_ARCH_MSM8X60
	mrc	p14, 1, r1, c1, c5, 4 /* read ETM PDSR to clear sticky bit */
	mrc     p14, 0, r1, c1, c5, 4 /* read DBG PRSR to clear sticky bit */
	isb
#endif
	bx	lr
#endif
ENDPROC(msm_arch_idle)

ENTRY(msm_pm_pc_hotplug)
	stmfd	sp!, {lr}
#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsid   f
#endif

#if defined(CONFIG_MSM_JTAG) || defined(CONFIG_MSM_JTAG_MM)
	bl      msm_jtag_save_state
#endif
@@ -104,19 +64,6 @@ skip_hp_debug1:
	ldr	r1, =SCM_CMD_TERMINATE_PC
	ldr	r2, =SCM_CMD_CORE_HOTPLUGGED
	bl	scm_call_atomic1
#else
	mrc     p15, 0, r3, c1, c0, 0    /* read current CR    */
	bic     r0, r3, #(1 << 2)        /* clear dcache bit   */
	bic     r0, r0, #(1 << 12)       /* clear icache bit   */
	mcr     p15, 0, r0, c1, c0, 0    /* disable d/i cache  */
	isb
	wfi
	mcr     p15, 0, r3, c1, c0, 0    /* restore d/i cache  */
	isb
#endif

#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsie   f
#endif
	mrc	p15, 0, r0, c0, c0, 5 /* MPIDR */
	and	r0, r0, #15              /* what CPU am I                  */
@@ -140,10 +87,6 @@ skip_hp_debug2:
ENDPROC(msm_pm_pc_hotplug)

ENTRY(msm_pm_collapse)
#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsid   f
#endif

	ldr     r0, =msm_saved_state	/* address of msm_saved_state ptr */
	ldr	r0, [r0]		/* load ptr */
#if (NR_CPUS >= 2)
@@ -174,10 +117,6 @@ THUMB( str r14, [r0],#4 )
	mrc     p15, 0, r1, c1, c0, 0 /* MMU control */
	mrc     p15, 0, r2, c2, c0, 0 /* TTBR0 */
	mrc     p15, 0, r3, c3, c0, 0 /* dacr */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid for non scorpion processors */
	mrc     p15, 3, r4, c15, c0, 3 /* L2CR1 is the L2 cache control reg 1 */
#endif
	mrc     p15, 0, r5, c10, c2, 0 /* PRRR */
	mrc     p15, 0, r6, c10, c2, 1 /* NMRR */
	mrc     p15, 0, r7, c1, c0, 1 /* ACTLR */
@@ -235,26 +174,6 @@ skip_pc_debug1:
	ldr	r2, =msm_pm_flush_l2_flag
	ldr	r2, [r2]
	bl	scm_call_atomic1
#else
	mrc     p15, 0, r4, c1, c0, 0    /* read current CR    */
	bic     r0, r4, #(1 << 2)        /* clear dcache bit   */
	bic     r0, r0, #(1 << 12)       /* clear icache bit   */
	mcr     p15, 0, r0, c1, c0, 0    /* disable d/i cache  */
	isb

	SUSPEND_8x25_L2
	SET_SMP_COHERENCY OFF
	wfi
	DELAY 300

	mcr     p15, 0, r4, c1, c0, 0    /* restore d/i cache  */
	isb
	ENABLE_8x25_L2 /* enable only l2, no need to restore the reg back */
	SET_SMP_COHERENCY ON
#endif

#if defined(CONFIG_MSM_FIQ_SUPPORT)
	cpsie   f
#endif
	mrc	p15, 0, r0, c0, c0, 5 /* MPIDR */
	and	r0, r0, #15              /* what CPU am I                  */
@@ -297,15 +216,6 @@ THUMB( ldr r14, [r0],#4 )
ENDPROC(msm_pm_collapse)

ENTRY(msm_pm_collapse_exit)
#if 0 /* serial debug */
	mov     r0, #0x80000016
	mcr     p15, 0, r0, c15, c2, 4
	mov     r0, #0xA9000000
	add     r0, r0, #0x00A00000 /* UART1 */
	/*add     r0, r0, #0x00C00000*/ /* UART3 */
	mov     r1, #'A'
	str     r1, [r0, #0x00C]
#endif
	adr	r3, 3f
	ldr	r1, [r3]
	sub	r3, r1, r3
@@ -362,10 +272,6 @@ msm_pm_pa_to_va:
	ldmdb   r1!, {r2-r11}
	mcr     p15, 0, r4, c3, c0, 0 /* dacr */
	mcr     p15, 0, r3, c2, c0, 0 /* TTBR0 */
#ifdef CONFIG_ARCH_MSM_SCORPION
	/* This instruction is not valid for non scorpion processors */
	mcr     p15, 3, r5, c15, c0, 3 /* L2CR1 */
#endif
	mcr     p15, 0, r6, c10, c2, 0 /* PRRR */
	mcr     p15, 0, r7, c10, c2, 1 /* NMRR */
	mcr     p15, 0, r8, c1, c0, 1 /* ACTLR */
@@ -412,9 +318,6 @@ msm_pm_pa_to_va:
	mrceq	p15, 7, r3, c15, c0, 2
	biceq	r3, r3, #0x400
	mcreq	p15, 7, r3, c15, c0, 2
#else
	RESUME_8x25_L2
	SET_SMP_COHERENCY ON
#endif

	ldr	r1, =msm_pm_enable_l2_fn
@@ -502,18 +405,6 @@ msm_saved_state_phys:
msm_pm_boot_vector:
	.space  4 * NR_CPUS

	.globl target_type
target_type:
	.long  0x0

	.globl apps_power_collapse
apps_power_collapse:
	.long 0x0

	.globl l2x0_base_addr
l2x0_base_addr:
	.long 0x0

	.globl msm_pc_debug_counters_phys
msm_pc_debug_counters_phys:
	.long 0x0
@@ -542,12 +433,3 @@ msm_pm_flush_l2_fn:
msm_pm_flush_l2_flag:
	.long 0x1
/*
 * Save & restore l2x0 registers while system is entering and resuming
 * from Power Collapse.
 * 1. aux_ctrl_save (0x0)
 * 2. data_latency_ctrl (0x4)
 * 3. prefetch control (0x8)
 */
l2x0_saved_ctrl_reg_val:
	.space 4 * 3
+0 −8
Original line number Diff line number Diff line
@@ -23,11 +23,6 @@
#define CPU_SAVED_STATE_SIZE (4 * 2 + 4 * 10)
#endif

#define ON	1
#define OFF	0
#define TARGET_IS_8625	1
#define POWER_COLLAPSED 1

#ifndef __ASSEMBLY__

int msm_arch_idle(void);
@@ -46,9 +41,6 @@ void msm_pm_set_l2_flush_flag(unsigned int flag);
int msm_pm_get_l2_flush_flag(void);
extern unsigned long msm_pm_pc_pgd;
extern unsigned long msm_pm_boot_vector[NR_CPUS];
extern uint32_t target_type;
extern uint32_t apps_power_collapse;
extern uint32_t *l2x0_base_addr;
#else
static inline void msm_pm_set_l2_flush_flag(unsigned int flag)
{