kernel/sched/fair.c  +3 −3

@@ -3657,15 +3657,15 @@ update_cfs_rq_load_avg(u64 now, struct cfs_rq *cfs_rq)

 		r = removed_load;
 		sub_positive(&sa->load_avg, r);
-		sub_positive(&sa->load_sum, r * divider);
+		sa->load_sum = sa->load_avg * divider;

 		r = removed_util;
 		sub_positive(&sa->util_avg, r);
-		sub_positive(&sa->util_sum, r * divider);
+		sa->util_sum = sa->util_avg * divider;

 		r = removed_runnable;
 		sub_positive(&sa->runnable_avg, r);
-		sub_positive(&sa->runnable_sum, r * divider);
+		sa->runnable_sum = sa->runnable_avg * divider;

 		/*
 		 * removed_runnable is the unweighted version of removed_load so we
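All three hunks apply the same change: after sub_positive() trims the removed contribution out of *_avg, the matching *_sum is no longer decremented independently but recomputed as *_avg * divider. Since the two fields were previously updated by separate clamped subtractions, they could drift out of sync; deriving the sum from the average keeps them consistent by construction. Below is a minimal userspace sketch of that pattern, for illustration only; toy_sched_avg, DIVIDER, and the numeric values are hypothetical stand-ins, not the kernel's real PELT bookkeeping.

/*
 * Standalone sketch (NOT kernel code) of the pattern in this hunk:
 * subtract the removed contribution from the running average, then
 * derive the matching sum from the average instead of decrementing
 * it independently. All names and values here are hypothetical.
 */
#include <stdio.h>

#define DIVIDER 47742UL	/* stand-in for a PELT-style divider */

/* Clamp-to-zero subtraction, mirroring the intent of sub_positive(). */
static void sub_positive(unsigned long *ptr, unsigned long val)
{
	*ptr = (*ptr > val) ? *ptr - val : 0;
}

struct toy_sched_avg {
	unsigned long load_avg;
	unsigned long load_sum;
};

int main(void)
{
	struct toy_sched_avg sa = {
		.load_avg = 1024,
		.load_sum = 1024 * DIVIDER,
	};
	unsigned long r = 300;	/* hypothetical removed load */

	sub_positive(&sa.load_avg, r);
	/* New scheme: recompute the sum so it always matches the average. */
	sa.load_sum = sa.load_avg * DIVIDER;

	printf("load_avg=%lu load_sum=%lu (avg*divider=%lu)\n",
	       sa.load_avg, sa.load_sum, sa.load_avg * DIVIDER);
	return 0;
}

With the old scheme the sketch would instead do sub_positive(&sa.load_sum, r * DIVIDER), leaving load_sum's value dependent on its own clamping history rather than on load_avg.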