Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 62433a08 authored by Pavankumar Kondeti's avatar Pavankumar Kondeti Committed by Abhijeet Dharmapurikar
Browse files

sched/fair: Refactor packing eligible test



The purpose of the is_packing_eligible() test is to check whether
we should throw away the active CPU and use the idle CPU instead.
Refactor this function in such a way that the throw-away
CPU (i.e. the active_cpu) is marked -1. The follow-up patch
uses this refactoring to add more checks in this function.

Change-Id: I0ae829dcc9b62da3ae69796b28abebd950b34462
Signed-off-by: default avatarPavankumar Kondeti <pkondeti@codeaurora.org>
[adharmap: made adjust cpus for packing inline]
Signed-off-by: default avatarAbhijeet Dharmapurikar <adharmap@codeaurora.org>
parent 1d45792c
Loading
Loading
Loading
Loading
+22 −19
Original line number Diff line number Diff line
@@ -3851,36 +3851,41 @@ struct find_best_target_env {
	int fastpath;
};

/*
 * adjust_cpus_for_packing() - decide whether the active target CPU should
 * be discarded in favor of the best idle CPU found by find_best_target().
 *
 * The CPU that should be thrown away is marked by writing -1 through
 * @target_cpu; the caller then falls back to the other candidate.
 *
 * @p:			task being placed
 * @target_cpu:		in/out: best active (non-idle) candidate CPU, or -1
 * @best_idle_cpu:	in/out: best idle candidate CPU, or -1
 * @target_cpus_count:	number of active candidate CPUs found
 * @fbt_env:		per-call placement environment (need_idle flag etc.)
 * @boosted:		whether the task is boosted
 */
static inline void adjust_cpus_for_packing(struct task_struct *p,
			int *target_cpu, int *best_idle_cpu,
			int target_cpus_count,
			struct find_best_target_env *fbt_env,
			bool boosted)
{
	unsigned long tutil, estimated_capacity;

	/* Nothing to arbitrate unless both candidates exist. */
	if (*best_idle_cpu == -1 || *target_cpu == -1)
		return;

	/*
	 * Prefer the idle CPU (drop the active candidate) when the task
	 * must run on an idle CPU, is boosted, or the idle CPU's C-state
	 * index is unknown (-1).
	 */
	if (task_placement_boost_enabled(p) || fbt_env->need_idle || boosted ||
			idle_get_state_idx(cpu_rq(*best_idle_cpu)) == -1) {
		*target_cpu = -1;
		return;
	}

	/* With multiple active candidates, packing is always acceptable. */
	if (target_cpus_count > 1)
		return;

	/*
	 * If the task's demand is already accounted in the target CPU's
	 * cumulative window, adding it does not increase utilization.
	 */
	if (task_in_cum_window_demand(cpu_rq(*target_cpu), p))
		tutil = 0;
	else
		tutil = task_util(p);

	estimated_capacity = cpu_util_cum(*target_cpu, tutil);
	estimated_capacity = add_capacity_margin(estimated_capacity,
							*target_cpu);

	/*
	 * If there is only one active CPU and it is already above its current
	 * capacity, avoid placing additional task on the CPU.
	 */
	if (estimated_capacity > capacity_curr_of(*target_cpu))
		*target_cpu = -1;
}

static inline void update_misfit_status(struct task_struct *p, struct rq *rq)
@@ -7069,11 +7074,9 @@ static void find_best_target(struct sched_domain *sd, cpumask_t *cpus,

	} while (sg = sg->next, sg != start_sd->groups);

	if (best_idle_cpu != -1 && !is_packing_eligible(p, target_cpu, fbt_env,
			active_cpus_count, shallowest_idle_cstate, boosted)) {
		target_cpu = best_idle_cpu;
		best_idle_cpu = -1;
	}
	adjust_cpus_for_packing(p, &target_cpu, &best_idle_cpu,
				active_cpus_count, fbt_env,
				boosted);

	/*
	 * For non latency sensitive tasks, cases B and C in the previous loop,