drivers/cpuidle/lpm-levels.c  +3 −29

@@ -46,7 +46,6 @@
 #define SCLK_HZ (32768)
 #define PSCI_POWER_STATE(reset) (reset << 30)
 #define PSCI_AFFINITY_LEVEL(lvl) ((lvl & 0x3) << 24)
-#define BIAS_HYST (bias_hyst * NSEC_PER_MSEC)
 
 enum {
 	MSM_LPM_LVL_DBG_SUSPEND_LIMITS = BIT(0),
@@ -81,9 +80,6 @@ struct lpm_cluster *lpm_root_node;
 static bool lpm_prediction = true;
 module_param_named(lpm_prediction, lpm_prediction, bool, 0664);
 
-static uint32_t bias_hyst;
-module_param_named(bias_hyst, bias_hyst, uint, 0664);
-
 struct lpm_history {
 	uint32_t resi[MAXSAMPLES];
 	int mode[MAXSAMPLES];
@@ -113,19 +109,7 @@ static void cluster_prepare(struct lpm_cluster *cluster,
 static bool sleep_disabled;
 module_param_named(sleep_disabled, sleep_disabled, bool, 0664);
 
-#ifdef CONFIG_LPM_SCHED_CPU_BUSY
-static u64 get_cpu_last_busy(int cpu)
-{
-	return sched_get_cpu_last_busy_time(cpu);
-}
-#else
-static u64 get_cpu_last_busy(int cpu)
-{
-	return 0;
-}
-#endif
-
-#ifdef CONFIG_LPM_CPU_ISOLATE
+#ifdef CONFIG_SCHED_WALT
 static bool check_cpu_isolated(int cpu)
 {
 	return cpu_isolated(cpu);
@@ -442,20 +426,10 @@ static void clear_predict_history(void)
 
 static void update_history(struct cpuidle_device *dev, int idx);
 
-static inline bool is_cpu_biased(int cpu)
-{
-	u64 now = sched_clock();
-	u64 last = get_cpu_last_busy(cpu);
-
-	if (!last)
-		return false;
-
-	return (now - last) < BIAS_HYST;
-}
-
 static inline bool lpm_disallowed(s64 sleep_us, int cpu)
 {
-	if ((sleep_disabled && !check_cpu_isolated(cpu)) || is_cpu_biased(cpu))
+	if ((sleep_disabled && !check_cpu_isolated(cpu)) ||
+			sched_lpm_disallowed_time(cpu))
 		return true;
 
 	if (sleep_us < 0)
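For readability, the gating helper as it reads after this patch is sketched below, pieced together from the last hunk only. The tail of lpm_disallowed() is not visible in the diff, so everything past the sleep_us check is an assumption, as is the exact contract of sched_lpm_disallowed_time() (treated here simply as returning non-zero while the scheduler wants the CPU kept out of low-power modes).

/*
 * Sketch of lpm_disallowed() after the change, reconstructed from the
 * hunk above; not a verbatim copy of the file.
 */
static inline bool lpm_disallowed(s64 sleep_us, int cpu)
{
	/*
	 * Skip low-power modes when sleep is disabled (and the CPU is not
	 * isolated), or while the scheduler reports a disallowed window;
	 * this replaces the driver-local is_cpu_biased() hysteresis that
	 * the patch removes.
	 */
	if ((sleep_disabled && !check_cpu_isolated(cpu)) ||
			sched_lpm_disallowed_time(cpu))
		return true;

	/* A negative projected sleep length also rules out LPM entry. */
	if (sleep_us < 0)
		return true;

	return false;	/* assumed default: LPM entry allowed */
}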