kernel/sched/rt.c +36 −11

@@ -1725,6 +1725,8 @@ static int find_lowest_rq(struct task_struct *task)
 	unsigned long best_capacity;
 	unsigned long util, best_cpu_util = ULONG_MAX;
 	unsigned long tutil = task_util(task);
+	int best_cpu_idle_idx = INT_MAX;
+	int cpu_idle_idx = -1;
 	bool placement_boost;
 
 	/* Make sure the mask is initialized first */
@@ -1787,21 +1789,44 @@ static int find_lowest_rq(struct task_struct *task)
 		 * double count rt task load.
 		 */
 		util = cpu_util(cpu);
-		if (!__cpu_overutilized(cpu, util + tutil)) {
-			if (cpu_isolated(cpu))
-				continue;
-
-			if (sched_cpu_high_irqload(cpu))
-				continue;
-
-			if (best_cpu_util > util ||
-			    (best_cpu_util == util &&
-			     cpu == task_cpu(task))) {
-				best_cpu_util = util;
-				best_cpu = cpu;
-			}
-		}
+		if (__cpu_overutilized(cpu, util + tutil))
+			continue;
+
+		if (cpu_isolated(cpu))
+			continue;
+
+		if (sched_cpu_high_irqload(cpu))
+			continue;
+
+		/* Find the least loaded CPU */
+		if (util > best_cpu_util)
+			continue;
+
+		/*
+		 * If the previous CPU has same load, keep it as
+		 * best_cpu.
+		 */
+		if (best_cpu_util == util && best_cpu == task_cpu(task))
+			continue;
+
+		/*
+		 * If candidate CPU is the previous CPU, select it.
+		 * Otherwise, if its load is same with best_cpu and in
+		 * a shallower C-state, select it.
+		 */
+		cpu_idle_idx = idle_get_state_idx(cpu_rq(cpu));
+		if (cpu != task_cpu(task) && sysctl_sched_cstate_aware) {
+			if (best_cpu_util == util &&
+			    best_cpu_idle_idx < cpu_idle_idx)
+				continue;
+		}
+
+		best_cpu_idle_idx = cpu_idle_idx;
+		best_cpu_util = util;
+		best_cpu = cpu;
 	}
 
 	if (best_cpu != -1) {
 		return best_cpu;
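In plain terms, the patch flattens the old nested `if (!__cpu_overutilized(...))` block into early `continue`s and adds a C-state-aware tie-break to the least-loaded-CPU search: among equally loaded candidates, the task's previous CPU wins outright, and otherwise the CPU in the shallower idle state (per idle_get_state_idx()) is preferred, so deeply sleeping cores are left undisturbed. Below is a minimal userspace sketch of that selection policy, not kernel code: struct cpu_info, pick_lowest_cpu(), and their fields are hypothetical stand-ins for cpu_util(), __cpu_overutilized(), and idle_get_state_idx(), and the sysctl_sched_cstate_aware toggle, cpu_isolated(), and sched_cpu_high_irqload() checks are omitted for brevity.

	/*
	 * Standalone sketch of the patched selection loop. All types and
	 * helpers here are illustrative stand-ins, not the kernel's API.
	 */
	#include <limits.h>
	#include <stdbool.h>
	#include <stdio.h>

	#define NR_CPUS 4

	struct cpu_info {
		unsigned long util;	/* current load, mirrors cpu_util() */
		int idle_idx;		/* C-state index; higher = deeper sleep */
		bool overutilized;	/* mirrors __cpu_overutilized() */
	};

	/* Least loaded wins; ties go to prev_cpu, then the shallowest C-state. */
	static int pick_lowest_cpu(const struct cpu_info *cpus, int prev_cpu)
	{
		unsigned long best_util = ULONG_MAX;
		int best_idle_idx = INT_MAX;
		int best_cpu = -1;

		for (int cpu = 0; cpu < NR_CPUS; cpu++) {
			const struct cpu_info *c = &cpus[cpu];

			/* Skip CPUs the task's utilization would overload. */
			if (c->overutilized)
				continue;

			/* Find the least loaded CPU. */
			if (c->util > best_util)
				continue;

			/* On a tie, the previous CPU keeps its win (cache warmth). */
			if (c->util == best_util && best_cpu == prev_cpu)
				continue;

			/* On a tie between other CPUs, prefer the shallower C-state. */
			if (cpu != prev_cpu &&
			    c->util == best_util && best_idle_idx < c->idle_idx)
				continue;

			best_idle_idx = c->idle_idx;
			best_util = c->util;
			best_cpu = cpu;
		}
		return best_cpu;
	}

	int main(void)
	{
		/* CPUs 1 and 3 are equally loaded; CPU 1 sleeps deeper (idx 2 > 1). */
		struct cpu_info cpus[NR_CPUS] = {
			{ .util = 900, .idle_idx = -1, .overutilized = true  },
			{ .util = 100, .idle_idx = 2,  .overutilized = false },
			{ .util = 400, .idle_idx = 0,  .overutilized = false },
			{ .util = 100, .idle_idx = 1,  .overutilized = false },
		};

		printf("picked CPU %d\n", pick_lowest_cpu(cpus, 0)); /* prints 3 */
		return 0;
	}

Note the deliberate asymmetry in the tie-break: the previous CPU is exempt from the C-state comparison, trading a potentially deeper wakeup for cache affinity, while every other tied candidate must beat the current best on idle-state depth to take over.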