kernel/sched/fair.c (+3 −3)

@@ -13016,13 +13016,13 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
 		rcu_read_lock();
 		new_cpu = find_energy_efficient_cpu(sd, p, cpu, prev_cpu, 0);
 		rcu_read_unlock();
-		if ((new_cpu != -1) &&
-			(capacity_orig_of(new_cpu) > capacity_orig_of(cpu))) {
+		if ((new_cpu != prev_cpu) &&
+			(capacity_orig_of(new_cpu) > capacity_orig_of(prev_cpu))) {
 			active_balance = kick_active_balance(rq, p, new_cpu);
 			if (active_balance) {
 				mark_reserved(new_cpu);
 				raw_spin_unlock(&migration_lock);
-				stop_one_cpu_nowait(cpu,
+				stop_one_cpu_nowait(prev_cpu,
 					active_load_balance_cpu_stop, rq,
 					&rq->active_balance_work);
 				return;
kernel/sched/fair.c (+3 −3)

@@ -13016,13 +13016,13 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
 		rcu_read_lock();
 		new_cpu = find_energy_efficient_cpu(sd, p, cpu, prev_cpu, 0);
 		rcu_read_unlock();
-		if ((new_cpu != -1) &&
-			(capacity_orig_of(new_cpu) > capacity_orig_of(cpu))) {
+		if ((new_cpu != prev_cpu) &&
+			(capacity_orig_of(new_cpu) > capacity_orig_of(prev_cpu))) {
 			active_balance = kick_active_balance(rq, p, new_cpu);
 			if (active_balance) {
 				mark_reserved(new_cpu);
 				raw_spin_unlock(&migration_lock);
-				stop_one_cpu_nowait(cpu,
+				stop_one_cpu_nowait(prev_cpu,
 					active_load_balance_cpu_stop, rq,
 					&rq->active_balance_work);
 				return;