Lines Matching defs:active
56 * When a cgroup becomes active in terms of IOs, its hierarchical share is
137 * each active cgroup is actually using and yield part of its weight if
161 * active weight hweight% inflt% dbt delay usages%
420 struct list_head active_iocgs; /* active cgroups */
473 * When an idle cgroup becomes active its `active` goes from 0 to
474 * `weight`. `inuse` is the surplus adjusted active weight.
475 * `active` and `inuse` are used to calculate `hweight_active` and
486 u32 active;
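The lines at 473-475 and 1196-1204 describe how `active` and `inuse` only have meaning relative to the sibling sums at each level: hweight_active and hweight_inuse are obtained by scaling down, level by level, towards the root. Below is a minimal userspace sketch of that idea, assuming a simple parent-pointer tree; struct node, hweight() and the WEIGHT_SCALE fixed-point base are illustrative stand-ins, not the kernel's types or API.

/*
 * Sketch (not the kernel implementation): hweight_active/hweight_inuse
 * can be thought of as the fraction of the whole device a cgroup gets,
 * computed by multiplying, at every level, this node's active (or inuse)
 * by its share of the parent's child_active_sum (or child_inuse_sum).
 */
#include <stdio.h>

#define WEIGHT_SCALE	10000	/* arbitrary fixed-point scale for this sketch */

struct node {
	struct node *parent;
	unsigned int active;		/* weight while issuing IOs */
	unsigned int inuse;		/* surplus-adjusted active weight */
	unsigned int child_active_sum;	/* sum of children's active */
	unsigned int child_inuse_sum;	/* sum of children's inuse */
};

/* walk towards the root, scaling by this level's share of its parent's sum */
static unsigned int hweight(const struct node *n, int use_inuse)
{
	unsigned long long hw = WEIGHT_SCALE;

	for (; n->parent; n = n->parent) {
		unsigned int val = use_inuse ? n->inuse : n->active;
		unsigned int sum = use_inuse ? n->parent->child_inuse_sum
					     : n->parent->child_active_sum;

		hw = hw * val / sum;
	}
	return hw;
}

int main(void)
{
	struct node root = { .child_active_sum = 300, .child_inuse_sum = 250 };
	struct node child = { .parent = &root, .active = 100, .inuse = 50 };

	/* 100/300 of the device while active, 50/250 after donating surplus */
	printf("hweight_active=%u hweight_inuse=%u (of %u)\n",
	       hweight(&child, 0), hweight(&child, 1), WEIGHT_SCALE);
	return 0;
}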
513 * The period this iocg was last active in. Used for deactivation
1080 * Update @iocg's `active` and `inuse` to @active and @inuse, update level
1084 static void __propagate_weights(struct ioc_gq *iocg, u32 active, u32 inuse,
1093 * For an active leaf node, its inuse shouldn't be zero or exceed
1094 * @active. An active internal node's inuse is solely determined by the
1095 * inuse to active ratio of its children regardless of @inuse.
1098 inuse = DIV64_U64_ROUND_UP(active * iocg->child_inuse_sum,
1101 inuse = clamp_t(u32, inuse, 1, active);
1108 if (active == iocg->active && inuse == iocg->inuse)
1117 parent->child_active_sum += (s32)(active - child->active);
1120 child->active = active;
1124 * The delta between inuse and active sums indicates that
1126 * and active should reflect the ratio.
1136 if (parent_active == parent->active &&
1140 active = parent_active;
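The __propagate_weights() lines above (1084-1140) are the bottom-up counterpart: when a leaf's active/inuse change, each ancestor's child_active_sum/child_inuse_sum is updated and the ancestor's own inuse is re-derived from its children's inuse-to-active ratio. Here is a rough userspace sketch of that walk, again assuming a parent-pointer tree; struct node, propagate() and clamp_u32() are illustrative names, and the rounding only approximates the DIV64_U64_ROUND_UP/clamp_t usage shown above.

#include <stdio.h>

#define DIV_ROUND_UP(x, y)	(((x) + (y) - 1) / (y))

struct node {
	struct node *parent;
	unsigned int active;
	unsigned int inuse;
	unsigned int child_active_sum;
	unsigned int child_inuse_sum;
};

static unsigned int clamp_u32(unsigned int v, unsigned int lo, unsigned int hi)
{
	return v < lo ? lo : v > hi ? hi : v;
}

static void propagate(struct node *leaf, unsigned int active, unsigned int inuse)
{
	struct node *child, *parent;

	/* an active leaf's inuse may not be zero nor exceed its active weight */
	inuse = clamp_u32(inuse, 1, active);

	for (child = leaf; (parent = child->parent); child = parent) {
		/* fold the child's delta into the parent's level sums */
		parent->child_active_sum += (int)(active - child->active);
		parent->child_inuse_sum += (int)(inuse - child->inuse);
		child->active = active;
		child->inuse = inuse;

		/*
		 * An internal node's inuse follows its children's
		 * inuse/active ratio, so a child's donation shrinks the
		 * share claimed by the whole subtree.
		 */
		active = parent->active;
		inuse = clamp_u32(DIV_ROUND_UP((unsigned long long)parent->active *
					       parent->child_inuse_sum,
					       parent->child_active_sum),
				  1, parent->active);
	}
	/* apply the values derived for the root */
	child->active = active;
	child->inuse = inuse;
}

int main(void)
{
	/* a sibling with active = inuse = 100 is implied by the root's sums */
	struct node root = { .active = 100, .inuse = 100,
			     .child_active_sum = 200, .child_inuse_sum = 200 };
	struct node leaf = { .parent = &root, .active = 100, .inuse = 100 };

	/* the leaf donates half of its weight: its inuse drops from 100 to 50 */
	propagate(&leaf, 100, 50);
	printf("leaf inuse=%u root inuse=%u\n", leaf.inuse, root.inuse);
	return 0;
}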
1159 static void propagate_weights(struct ioc_gq *iocg, u32 active, u32 inuse,
1162 __propagate_weights(iocg, active, inuse, save, now);
1196 u32 active = READ_ONCE(child->active);
1203 active_sum = max_t(u64, active, active_sum);
1204 hwa = div64_u64((u64)hwa * active, active_sum);
1227 u32 inuse = iocg->active;
1239 inuse = DIV64_U64_ROUND_UP(parent->active * child_inuse_sum,
1256 if (weight != iocg->weight && iocg->active)
1269 * If we seem to be already active, just update the stamp to tell the
1270 * timer that we're still active. We don't mind occasional races.
1425 propagate_weights(iocg, iocg->active, 0, false, now);
1452 propagate_weights(iocg, iocg->active, iocg->last_inuse,
1562 /* if already active and close enough, don't bother */
1715 /* get stat counters ready for reading on all active iocgs */
1940 (u64)parent->hweight_active * iocg->active,
1954 pr_cont(": active=%u donating=%u after=%u\n",
2013 (u64)iocg->active * iocg->hweight_donating,
2052 __propagate_weights(iocg, iocg->active, inuse, true, now);
2153 * Check the active iocgs' state to avoid oversleeping and deactivate
2247 /* take care of active iocgs */
2351 * its inuse to active.
2360 iocg->inuse, iocg->active,
2363 __propagate_weights(iocg, iocg->active,
2364 iocg->active, true, &now);
2481 iocg->inuse == iocg->active)
2486 /* we own inuse only when @iocg is in the normal active state */
2496 * be reading 0 iocg->active before ioc->lock which will lead to
2500 adj_step = DIV_ROUND_UP(iocg->active * INUSE_ADJ_STEP_PCT, 100);
2503 propagate_weights(iocg, iocg->active, new_inuse, true, now);
2507 iocg->inuse != iocg->active);
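The lines above (2496-2507) are the bump-up loop: inuse is raised towards active in steps of INUSE_ADJ_STEP_PCT of the active weight until the request's scaled cost fits the budget or inuse reaches active and no further share can be reclaimed. The toy model below shows the shape of that loop; the 25% step is an assumption rather than the kernel's actual INUSE_ADJ_STEP_PCT value, and scaled_cost() is a stand-in for the real hweight-based cost conversion.

#include <stdio.h>

#define INUSE_ADJ_STEP_PCT	25	/* assumed step size, see above */
#define DIV_ROUND_UP(x, y)	(((x) + (y) - 1) / (y))

/* stand-in cost model: larger inuse -> larger hweight_inuse -> cheaper cost */
static unsigned long long scaled_cost(unsigned long long abs_cost,
				      unsigned int inuse, unsigned int active)
{
	return abs_cost * active / inuse;
}

int main(void)
{
	unsigned int active = 1000, inuse = 100;
	unsigned long long abs_cost = 400, budget = 800, cost;
	unsigned int adj_step = DIV_ROUND_UP(active * INUSE_ADJ_STEP_PCT, 100);

	/* bump inuse towards active until the cost fits or headroom runs out */
	do {
		inuse = inuse + adj_step < active ? inuse + adj_step : active;
		cost = scaled_cost(abs_cost, inuse, active);
		printf("inuse=%u cost=%llu (budget=%llu)\n", inuse, cost, budget);
	} while (cost > budget && inuse != active);

	return 0;
}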
2685 if (!iocg->abs_vdebt && iocg->inuse != iocg->active) {
2691 propagate_weights(iocg, iocg->active, iocg->active, true,