kernel/sched/fair.c (+2 −2)

@@ -4448,13 +4448,13 @@ static inline void update_entity_load_avg(struct sched_entity *se,
 	 */
 	if (entity_is_task(se)) {
 		now = cfs_rq_clock_task(cfs_rq);
-		if (se->on_rq)
+		if (sched_use_pelt && se->on_rq)
 			dec_hmp_sched_stats_fair(rq_of(cfs_rq), task_of(se));
 	} else
 		now = cfs_rq_clock_task(group_cfs_rq(se));

 	decayed = __update_entity_runnable_avg(cpu, now, &se->avg, se->on_rq);

-	if (entity_is_task(se) && se->on_rq)
+	if (sched_use_pelt && entity_is_task(se) && se->on_rq)
 		inc_hmp_sched_stats_fair(rq_of(cfs_rq), task_of(se));

 	if (!decayed)
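The change gates both HMP statistics updates in update_entity_load_avg() behind the same sched_use_pelt test: the stats are decremented before the runnable average is recomputed and re-incremented afterwards, so guarding only one side would let the per-rq counters drift. Below is a minimal user-space sketch of that balanced dec/inc pattern; the use_pelt flag, hmp_stats counter, and helper names are hypothetical stand-ins for illustration, not the kernel's actual symbols.

```c
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the kernel's sched_use_pelt flag and
 * per-rq HMP statistics; the real kernel state is more involved. */
static bool use_pelt = true;
static int hmp_stats;

static void dec_stats(void) { hmp_stats--; }
static void inc_stats(void) { hmp_stats++; }

/* Mirrors the structure of update_entity_load_avg(): drop the stats,
 * recompute the average, then re-add them. Both sides test the same
 * condition, so the counter stays balanced whichever path is taken. */
static void update_load_avg(bool on_rq)
{
	if (use_pelt && on_rq)
		dec_stats();

	/* ... recompute the runnable average here ... */

	if (use_pelt && on_rq)
		inc_stats();
}

int main(void)
{
	hmp_stats = 1;
	update_load_avg(true);
	printf("hmp_stats = %d\n", hmp_stats); /* still 1: balanced */
	return 0;
}
```

Note that if the guard were added to only one of the two sites, each call with use_pelt disabled (or enabled, respectively) would either leak or double-count a unit of the statistic, which is why the diff touches both conditions in lockstep.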