Commit fe71bbb2 authored by Peng Wang, committed by Peter Zijlstra
Browse files

sched/fair: calculate delta runnable load only when it's needed



Move the calculation of delta_sum/delta_avg into the only branch where
the results are actually used (the se->on_rq case), so the work is done
only when it is needed.

Signed-off-by: Peng Wang <rocking@linux.alibaba.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Vincent Guittot <vincent.guittot@linaro.org>
Link: https://lkml.kernel.org/r/20200103114400.17668-1-rocking@linux.alibaba.com
parent 9dec1b69
Loading
Loading
Loading
Loading
+6 −5
Original line number Diff line number Diff line
@@ -3366,16 +3366,17 @@ update_tg_cfs_runnable(struct cfs_rq *cfs_rq, struct sched_entity *se, struct cf

	runnable_load_sum = (s64)se_runnable(se) * runnable_sum;
	runnable_load_avg = div_s64(runnable_load_sum, LOAD_AVG_MAX);
	delta_sum = runnable_load_sum - se_weight(se) * se->avg.runnable_load_sum;
	delta_avg = runnable_load_avg - se->avg.runnable_load_avg;

	se->avg.runnable_load_sum = runnable_sum;
	se->avg.runnable_load_avg = runnable_load_avg;

	if (se->on_rq) {
		delta_sum = runnable_load_sum -
				se_weight(se) * se->avg.runnable_load_sum;
		delta_avg = runnable_load_avg - se->avg.runnable_load_avg;
		add_positive(&cfs_rq->avg.runnable_load_avg, delta_avg);
		add_positive(&cfs_rq->avg.runnable_load_sum, delta_sum);
	}

	se->avg.runnable_load_sum = runnable_sum;
	se->avg.runnable_load_avg = runnable_load_avg;
}

static inline void add_tg_cfs_propagate(struct cfs_rq *cfs_rq, long runnable_sum)