kernel/sched/fair.c (+3 −4)

@@ -12489,7 +12489,6 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
 {
	int active_balance;
	int new_cpu = -1;
-	int cpu = smp_processor_id();
	int prev_cpu = task_cpu(p);

	if (rq->misfit_task_load) {
@@ -12504,13 +12503,13 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
		rcu_read_lock();
		new_cpu = find_energy_efficient_cpu(p, prev_cpu, 0);
		rcu_read_unlock();
-		if ((new_cpu != -1) &&
-		    (capacity_orig_of(new_cpu) > capacity_orig_of(cpu))) {
+		if ((new_cpu != prev_cpu) &&
+		    (capacity_orig_of(new_cpu) > capacity_orig_of(prev_cpu))) {
			active_balance = kick_active_balance(rq, p, new_cpu);
			if (active_balance) {
				mark_reserved(new_cpu);
				raw_spin_unlock(&migration_lock);
-				stop_one_cpu_nowait(cpu,
+				stop_one_cpu_nowait(prev_cpu,
					active_load_balance_cpu_stop, rq,
					&rq->active_balance_work);
				return;
kernel/sched/fair.c (+3 −4)

@@ -12489,7 +12489,6 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
 {
	int active_balance;
	int new_cpu = -1;
-	int cpu = smp_processor_id();
	int prev_cpu = task_cpu(p);

	if (rq->misfit_task_load) {
@@ -12504,13 +12503,13 @@ void check_for_migration(struct rq *rq, struct task_struct *p)
		rcu_read_lock();
		new_cpu = find_energy_efficient_cpu(p, prev_cpu, 0);
		rcu_read_unlock();
-		if ((new_cpu != -1) &&
-		    (capacity_orig_of(new_cpu) > capacity_orig_of(cpu))) {
+		if ((new_cpu != prev_cpu) &&
+		    (capacity_orig_of(new_cpu) > capacity_orig_of(prev_cpu))) {
			active_balance = kick_active_balance(rq, p, new_cpu);
			if (active_balance) {
				mark_reserved(new_cpu);
				raw_spin_unlock(&migration_lock);
-				stop_one_cpu_nowait(cpu,
+				stop_one_cpu_nowait(prev_cpu,
					active_load_balance_cpu_stop, rq,
					&rq->active_balance_work);
				return;