kernel/sched/fair.c (+2 −1)
@@ -6155,7 +6155,8 @@ static inline bool task_fits_max(struct task_struct *p, int cpu)
 bool __cpu_overutilized(int cpu, int delta)
 {
-	return (capacity_of(cpu) * 1024) < ((cpu_util(cpu) + delta) * capacity_margin);
+	return (capacity_orig_of(cpu) * 1024) <
+	       ((cpu_util(cpu) + delta) * capacity_margin);
 }
 
 bool cpu_overutilized(int cpu)

kernel/sched/rt.c (+1 −1)
@@ -1834,7 +1834,7 @@ static int find_lowest_rq(struct task_struct *task)
 		if (avoid_prev_cpu && cpu == prev_cpu)
 			continue;
 
-		if (__cpu_overutilized(cpu, util + tutil))
+		if (__cpu_overutilized(cpu, tutil))
 			continue;
 
 		if (cpu_isolated(cpu))