Lines matching `sa` (the `struct sched_avg *` parameter) in kernel/sched/pelt.c
142 static __always_inline u32 accumulate_sum(u64 delta, struct sched_avg *sa, unsigned long load, unsigned long runnable, int running)
148 delta += sa->period_contrib;
155 sa->load_sum = decay_load(sa->load_sum, periods);
156 sa->runnable_sum = decay_load(sa->runnable_sum, periods);
157 sa->util_sum = decay_load((u64)(sa->util_sum), periods);
174 contrib = __accumulate_pelt_segments(periods, 1024 - sa->period_contrib, delta);
177 sa->period_contrib = delta;
180 sa->load_sum += load * contrib;
183 sa->runnable_sum += runnable * contrib << SCHED_CAPACITY_SHIFT;
186 sa->util_sum += contrib << SCHED_CAPACITY_SHIFT;
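
The *_sum updates above implement PELT's geometric series: every elapsed 1024us period decays the old sums by y (with y^32 = 1/2, via decay_load() on lines 155-157) before the current contribution is added. Below is a minimal userspace sketch of that recurrence; it uses floating point instead of the kernel's fixed-point lookup table, and everything except the 1024us period, the 32-period half-life, and the LOAD_AVG_MAX limit is illustrative:

#include <stdio.h>
#include <math.h>

/* y = 0.5^(1/32): a contribution loses half its weight every 32 periods */
static double decay(double val, unsigned int periods)
{
	return val * pow(0.5, periods / 32.0);
}

int main(void)
{
	double sum = 0.0;

	/* a task busy for its full 1024us slice in every period */
	for (int p = 0; p < 345; p++)
		sum = decay(sum, 1) + 1024.0;

	/*
	 * Converges on 1024/(1 - y) ~= 47.8k; the kernel's fixed-point
	 * constant for this limit is LOAD_AVG_MAX (47742).
	 */
	printf("sum after 345 periods: %.0f\n", sum);
	return 0;
}
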
220 static __always_inline int ___update_load_sum(u64 now, struct sched_avg *sa, unsigned long load, unsigned long runnable, int running)
225 delta = now - sa->last_update_time;
231 sa->last_update_time = now;
244 sa->last_update_time += delta << 10;
268 if (!accumulate_sum(delta, sa, load, runnable, running)) {
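
___update_load_sum() does its arithmetic in 1024ns units as a cheap stand-in for microseconds: line 244 advances last_update_time only by whole units, so the sub-1024ns remainder is carried into the next update rather than dropped. A standalone sketch of just that bookkeeping, with made-up timestamps:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint64_t last_update_time = 0;
	uint64_t now = 5000;			/* ns, assumed value */
	uint64_t delta = now - last_update_time;

	delta >>= 10;				/* ns -> 1024ns units */
	if (delta)
		last_update_time += delta << 10; /* keep the remainder */

	/* prints: delta=4 units, remainder=904 ns */
	printf("delta=%llu units, remainder=%llu ns\n",
	       (unsigned long long)delta,
	       (unsigned long long)(now - last_update_time));
	return 0;
}
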
284 * LOAD_AVG_MAX*y + sa->period_contrib
288 * LOAD_AVG_MAX - 1024 + sa->period_contrib
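
The identity behind lines 284-288: LOAD_AVG_MAX is the fixed point of S = S*y + 1024, so LOAD_AVG_MAX*y equals LOAD_AVG_MAX - 1024. Mainline wraps this in get_pelt_divider(), used on line 301; the sketch below is simplified to take period_contrib directly rather than a struct sched_avg pointer, with PELT_MIN_DIVIDER named as in current kernels:

#define LOAD_AVG_MAX		47742
#define PELT_MIN_DIVIDER	(LOAD_AVG_MAX - 1024)

/* maximum attainable *_sum at the current position in the 1024us segment */
static inline unsigned int pelt_divider(unsigned int period_contrib)
{
	return PELT_MIN_DIVIDER + period_contrib;
}
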
299 static __always_inline void ___update_load_avg(struct sched_avg *sa, unsigned long load)
301 u32 divider = get_pelt_divider(sa);
306 sa->load_avg = div_u64(load * sa->load_sum, divider);
307 sa->runnable_avg = div_u64(sa->runnable_sum, divider);
308 WRITE_ONCE(sa->util_avg, sa->util_sum / divider);
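
Putting lines 306-308 together: each *_avg is its *_sum scaled down by the divider, which pins util_avg to the 0..1024 capacity range (SCHED_CAPACITY_SHIFT == 10). A worked example with assumed saturation values, not kernel code:

#include <stdio.h>

#define LOAD_AVG_MAX		47742
#define SCHED_CAPACITY_SHIFT	10

int main(void)
{
	unsigned int period_contrib = 512;	/* assumed: mid-segment */
	unsigned long long divider = LOAD_AVG_MAX - 1024 + period_contrib;

	/* an always-running task saturates util_sum near divider << 10 */
	unsigned long long util_sum = divider << SCHED_CAPACITY_SHIFT;

	printf("util_avg, 100%% busy: %llu\n", util_sum / divider);	  /* 1024 */
	printf("util_avg,  50%% busy: %llu\n", (util_sum / 2) / divider); /* 512 */
	return 0;
}
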