From 1091985b482fdd577a5c511059b9d7b4467bd15d Mon Sep 17 00:00:00 2001
From: Ingo Molnar
Date: Mon, 15 Oct 2007 17:00:04 +0200
Subject: [PATCH] sched: speed up update_load_add/_sub()

speed up update_load_add/_sub() by not delaying the division - this
reduces CPU pipeline dependencies.

Signed-off-by: Ingo Molnar
Signed-off-by: Peter Zijlstra
Signed-off-by: Mike Galbraith
Reviewed-by: Thomas Gleixner
---
 kernel/sched.c | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/kernel/sched.c b/kernel/sched.c
index 3209e2cc2c2e..992a1fae72a7 100644
--- a/kernel/sched.c
+++ b/kernel/sched.c
@@ -697,16 +697,17 @@ calc_delta_fair(unsigned long delta_exec, struct load_weight *lw)
 	return calc_delta_mine(delta_exec, NICE_0_LOAD, lw);
 }
 
-static void update_load_add(struct load_weight *lw, unsigned long inc)
+static inline void update_load_add(struct load_weight *lw, unsigned long inc)
 {
 	lw->weight += inc;
-	lw->inv_weight = 0;
+	lw->inv_weight = WMULT_CONST / lw->weight;
 }
 
-static void update_load_sub(struct load_weight *lw, unsigned long dec)
+static inline void update_load_sub(struct load_weight *lw, unsigned long dec)
 {
 	lw->weight -= dec;
-	lw->inv_weight = 0;
+	if (likely(lw->weight))
+		lw->inv_weight = WMULT_CONST / lw->weight;
 }
 
 /*
--
GitLab
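
Note: below is a minimal, self-contained sketch of the idea behind this patch, not the kernel's actual code. The WMULT_CONST/WMULT_SHIFT values and the scale_delta() helper are illustrative stand-ins for the kernel's calc_delta_mine() machinery; the point is that caching inv_weight when the weight changes turns the per-scaling division into a multiply-and-shift on the hot path.

/*
 * Minimal sketch, NOT the kernel's actual code.
 * WMULT_CONST/WMULT_SHIFT values and scale_delta() are assumed demo
 * stand-ins for the real calc_delta_mine() fixed-point machinery.
 *
 * Idea: lw->inv_weight caches WMULT_CONST / lw->weight, so scaling a
 * delta by a weight ratio needs only a multiply and a shift.
 */
#include <stdint.h>
#include <stdio.h>

#define WMULT_SHIFT	32
#define WMULT_CONST	(~0U)		/* ~2^32 fixed-point scale (demo value) */

struct load_weight {
	unsigned long weight;
	unsigned long inv_weight;
};

/* Eager update, as in the patch: refresh the cached reciprocal right away. */
static inline void update_load_add(struct load_weight *lw, unsigned long inc)
{
	lw->weight += inc;
	lw->inv_weight = WMULT_CONST / lw->weight;
}

static inline void update_load_sub(struct load_weight *lw, unsigned long dec)
{
	lw->weight -= dec;
	if (lw->weight)
		lw->inv_weight = WMULT_CONST / lw->weight;
}

/*
 * Hypothetical hot path: approximates delta * weight / lw->weight using
 * the cached reciprocal (no overflow guard, unlike the real kernel code).
 */
static inline unsigned long
scale_delta(unsigned long delta, unsigned long weight, struct load_weight *lw)
{
	uint64_t tmp = (uint64_t)delta * weight;

	return (unsigned long)((tmp * lw->inv_weight) >> WMULT_SHIFT);
}

int main(void)
{
	struct load_weight lw = { 0, 0 };

	update_load_add(&lw, 2048);
	/* 1000 * 1024 / 2048 ~= 500 (fixed-point truncation may be off by one) */
	printf("%lu\n", scale_delta(1000, 1024, &lw));
	return 0;
}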