Lines matching refs: cl (symbol references to the class pointer in net/sched/sch_hfsc.c, the HFSC packet scheduler)
186 eltree_insert(struct hfsc_class *cl)
188 struct rb_node **p = &cl->sched->eligible.rb_node;
195 if (cl->cl_e >= cl1->cl_e)
200 rb_link_node(&cl->el_node, parent, p);
201 rb_insert_color(&cl->el_node, &cl->sched->eligible);
205 eltree_remove(struct hfsc_class *cl)
207 rb_erase(&cl->el_node, &cl->sched->eligible);
211 eltree_update(struct hfsc_class *cl)
213 eltree_remove(cl);
214 eltree_insert(cl);
221 struct hfsc_class *p, *cl = NULL;
228 if (cl == NULL || p->cl_d < cl->cl_d)
229 cl = p;
231 return cl;
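
For context, a sketch of the full eltree_insert, reconstructed from the lines above and the standard kernel rbtree idiom (rb_entry, rb_link_node, rb_insert_color); the elided loop walks down the tree keyed on the eligible time cl_e, sending ties to the right so equal keys keep insertion order:

static void
eltree_insert(struct hfsc_class *cl)
{
	struct rb_node **p = &cl->sched->eligible.rb_node;
	struct rb_node *parent = NULL;
	struct hfsc_class *cl1;

	/* descend to the leaf position ordered by eligible time cl_e */
	while (*p != NULL) {
		parent = *p;
		cl1 = rb_entry(parent, struct hfsc_class, el_node);
		if (cl->cl_e >= cl1->cl_e)
			p = &parent->rb_right;
		else
			p = &parent->rb_left;
	}
	rb_link_node(&cl->el_node, parent, p);
	rb_insert_color(&cl->el_node, &cl->sched->eligible);
}

vttree_insert and cftree_insert below follow the same pattern, keyed on cl_vt and cl_f respectively and rooted in the parent class rather than the scheduler.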
251 vttree_insert(struct hfsc_class *cl)
253 struct rb_node **p = &cl->cl_parent->vt_tree.rb_node;
260 if (cl->cl_vt >= cl1->cl_vt)
265 rb_link_node(&cl->vt_node, parent, p);
266 rb_insert_color(&cl->vt_node, &cl->cl_parent->vt_tree);
270 vttree_remove(struct hfsc_class *cl)
272 rb_erase(&cl->vt_node, &cl->cl_parent->vt_tree);
276 vttree_update(struct hfsc_class *cl)
278 vttree_remove(cl);
279 vttree_insert(cl);
283 vttree_firstfit(struct hfsc_class *cl, u64 cur_time)
288 for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) {
300 vttree_get_minvt(struct hfsc_class *cl, u64 cur_time)
303 if (cl->cl_cfmin > cur_time)
306 while (cl->level > 0) {
307 cl = vttree_firstfit(cl, cur_time);
308 if (cl == NULL)
313 if (cl->cl_parent->cl_cvtmin < cl->cl_vt)
314 cl->cl_parent->cl_cvtmin = cl->cl_vt;
316 return cl;
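
The loop body at line 288 is elided above; reconstructed as a sketch, vttree_firstfit scans the per-parent vt tree in ascending cl_vt order and returns the first child whose fit time has already arrived:

static struct hfsc_class *
vttree_firstfit(struct hfsc_class *cl, u64 cur_time)
{
	struct hfsc_class *p;
	struct rb_node *n;

	for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) {
		p = rb_entry(n, struct hfsc_class, vt_node);
		/* smallest vt among the children already fit to send */
		if (p->cl_f <= cur_time)
			return p;
	}
	return NULL;
}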
320 cftree_insert(struct hfsc_class *cl)
322 struct rb_node **p = &cl->cl_parent->cf_tree.rb_node;
329 if (cl->cl_f >= cl1->cl_f)
334 rb_link_node(&cl->cf_node, parent, p);
335 rb_insert_color(&cl->cf_node, &cl->cl_parent->cf_tree);
339 cftree_remove(struct hfsc_class *cl)
341 rb_erase(&cl->cf_node, &cl->cl_parent->cf_tree);
345 cftree_update(struct hfsc_class *cl)
347 cftree_remove(cl);
348 cftree_insert(cl);
610 init_ed(struct hfsc_class *cl, unsigned int next_len)
615 rtsc_min(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul);
622 cl->cl_eligible = cl->cl_deadline;
623 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) {
624 cl->cl_eligible.dx = 0;
625 cl->cl_eligible.dy = 0;
629 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
630 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
632 eltree_insert(cl);
636 update_ed(struct hfsc_class *cl, unsigned int next_len)
638 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
639 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
641 eltree_update(cl);
645 update_d(struct hfsc_class *cl, unsigned int next_len)
647 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
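
next_len in init_ed/update_ed/update_d is the length of the class's next packet, obtained by peeking the child qdisc. The helper takes no cl reference and so is absent from this listing; it looks essentially like this sketch:

static unsigned int
qdisc_peek_len(struct Qdisc *sch)
{
	struct sk_buff *skb;
	unsigned int len;

	skb = sch->ops->peek(sch);
	if (unlikely(skb == NULL)) {
		/* a non-work-conserving child broke the peek contract */
		qdisc_warn_nonwc("qdisc_peek_len", sch);
		return 0;
	}
	len = qdisc_pkt_len(skb);
	return len;
}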
651 update_cfmin(struct hfsc_class *cl)
653 struct rb_node *n = rb_first(&cl->cf_tree);
657 cl->cl_cfmin = 0;
661 cl->cl_cfmin = p->cl_f;
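
Reconstructed for context (the branch structure between lines 653 and 661 is elided above): update_cfmin republishes the smallest fit time among a parent's children, read from the leftmost node of cf_tree. A sketch:

static void
update_cfmin(struct hfsc_class *cl)
{
	struct rb_node *n = rb_first(&cl->cf_tree);
	struct hfsc_class *p;

	/* leftmost node holds the minimum cl_f among the children */
	if (n == NULL) {
		cl->cl_cfmin = 0;
		return;
	}
	p = rb_entry(n, struct hfsc_class, cf_node);
	cl->cl_cfmin = p->cl_f;
}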
665 init_vf(struct hfsc_class *cl, unsigned int len)
674 for (; cl->cl_parent != NULL; cl = cl->cl_parent) {
675 if (go_active && cl->cl_nactive++ == 0)
681 n = rb_last(&cl->cl_parent->vt_tree);
690 if (cl->cl_parent->cl_cvtmin != 0)
691 vt = (cl->cl_parent->cl_cvtmin + vt)/2;
693 if (cl->cl_parent->cl_vtperiod !=
694 cl->cl_parentperiod || vt > cl->cl_vt)
695 cl->cl_vt = vt;
703 cl->cl_vt = cl->cl_parent->cl_cvtoff;
704 cl->cl_parent->cl_cvtmin = 0;
708 rtsc_min(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
709 cl->cl_vtadj = 0;
711 cl->cl_vtperiod++; /* increment vt period */
712 cl->cl_parentperiod = cl->cl_parent->cl_vtperiod;
713 if (cl->cl_parent->cl_nactive == 0)
714 cl->cl_parentperiod++;
715 cl->cl_f = 0;
717 vttree_insert(cl);
718 cftree_insert(cl);
720 if (cl->cl_flags & HFSC_USC) {
726 rtsc_min(&cl->cl_ulimit, &cl->cl_usc, cur_time,
727 cl->cl_total);
729 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit,
730 cl->cl_total);
734 f = max(cl->cl_myf, cl->cl_cfmin);
735 if (f != cl->cl_f) {
736 cl->cl_f = f;
737 cftree_update(cl);
739 update_cfmin(cl->cl_parent);
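
The go_active branch of init_vf is only partially visible above (lines 681 to 704). A reconstruction sketch of the virtual-time initialization, where n, max_cl and vt are locals of init_vf: a newly backlogged class starts from the average of the parent's current minimum and maximum child vt, never moves backwards within the same parent period, and the first child of a fresh period resumes from the saved cvtoff:

		if (go_active) {
			n = rb_last(&cl->cl_parent->vt_tree);
			if (n != NULL) {
				max_cl = rb_entry(n, struct hfsc_class, vt_node);
				/* average of the min and max child vt; do not
				 * decrease vt unless the parent period changed */
				vt = max_cl->cl_vt;
				if (cl->cl_parent->cl_cvtmin != 0)
					vt = (cl->cl_parent->cl_cvtmin + vt)/2;

				if (cl->cl_parent->cl_vtperiod !=
				    cl->cl_parentperiod || vt > cl->cl_vt)
					cl->cl_vt = vt;
			} else {
				/* first child of a new parent backlog period:
				 * resume from the offset saved when the last
				 * period ended and reset the period minimum */
				cl->cl_vt = cl->cl_parent->cl_cvtoff;
				cl->cl_parent->cl_cvtmin = 0;
			}
		}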
744 update_vf(struct hfsc_class *cl, unsigned int len, u64 cur_time)
749 if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC)
752 for (; cl->cl_parent != NULL; cl = cl->cl_parent) {
753 cl->cl_total += len;
755 if (!(cl->cl_flags & HFSC_FSC) || cl->cl_nactive == 0)
758 if (go_passive && --cl->cl_nactive == 0)
764 cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total) + cl->cl_vtadj;
771 if (cl->cl_vt < cl->cl_parent->cl_cvtmin) {
772 cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt;
773 cl->cl_vt = cl->cl_parent->cl_cvtmin;
780 if (cl->cl_vt > cl->cl_parent->cl_cvtoff)
781 cl->cl_parent->cl_cvtoff = cl->cl_vt;
784 vttree_remove(cl);
786 cftree_remove(cl);
787 update_cfmin(cl->cl_parent);
793 vttree_update(cl);
796 if (cl->cl_flags & HFSC_USC) {
797 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit, cl->cl_total);
799 cl->cl_myf = cl->cl_myfadj + rtsc_y2x(&cl->cl_ulimit,
800 cl->cl_total);
814 if (cl->cl_myf < myf_bound) {
815 delta = cur_time - cl->cl_myf;
816 cl->cl_myfadj += delta;
817 cl->cl_myf += delta;
822 f = max(cl->cl_myf, cl->cl_cfmin);
823 if (f != cl->cl_f) {
824 cl->cl_f = f;
825 cftree_update(cl);
826 update_cfmin(cl->cl_parent);
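
Around lines 814 to 817, myf_bound and delta are locals of update_vf; the clamp keeps the upper-limit fit time within one clock tick of real time, so a long-idle rate-limited class cannot accumulate credit and burst. Roughly:

			/* if myf lags the clock by more than one tick,
			 * drag it (and the accumulated adjustment) forward;
			 * in steady state it fluctuates within one tick */
			myf_bound = cur_time - PSCHED_JIFFIE2US(1);
			if (cl->cl_myf < myf_bound) {
				delta = cur_time - cl->cl_myf;
				cl->cl_myfadj += delta;
				cl->cl_myf += delta;
			}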
848 hfsc_adjust_levels(struct hfsc_class *cl)
855 list_for_each_entry(p, &cl->children, siblings) {
859 cl->level = level;
860 } while ((cl = cl->cl_parent) != NULL);
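
hfsc_adjust_levels is short enough to reconstruct whole as a sketch: after a topology change it recomputes level bottom-up, each node sitting one level above its tallest child:

static void
hfsc_adjust_levels(struct hfsc_class *cl)
{
	struct hfsc_class *p;
	unsigned int level;

	do {
		level = 0;
		/* a node's level is 1 + the max level of its children */
		list_for_each_entry(p, &cl->children, siblings) {
			if (p->level >= level)
				level = p->level + 1;
		}
		cl->level = level;
	} while ((cl = cl->cl_parent) != NULL);
}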
876 hfsc_change_rsc(struct hfsc_class *cl, struct tc_service_curve *rsc,
879 sc2isc(rsc, &cl->cl_rsc);
880 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul);
881 cl->cl_eligible = cl->cl_deadline;
882 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) {
883 cl->cl_eligible.dx = 0;
884 cl->cl_eligible.dy = 0;
886 cl->cl_flags |= HFSC_RSC;
890 hfsc_change_fsc(struct hfsc_class *cl, struct tc_service_curve *fsc)
892 sc2isc(fsc, &cl->cl_fsc);
893 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
894 cl->cl_flags |= HFSC_FSC;
898 hfsc_change_usc(struct hfsc_class *cl, struct tc_service_curve *usc,
901 sc2isc(usc, &cl->cl_usc);
902 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, cur_time, cl->cl_total);
903 cl->cl_flags |= HFSC_USC;
907 hfsc_upgrade_rt(struct hfsc_class *cl)
909 cl->cl_fsc = cl->cl_rsc;
910 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
911 cl->cl_flags |= HFSC_FSC;
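
Each hfsc_change_* call is preceded by sc2isc, which converts the userspace triple (m1, d, m2) of struct tc_service_curve into the file's internal fixed-point form before rtsc_init reinitializes the runtime curve. A sketch, assuming the file's m2sm/m2ism/d2dx/seg_x2y conversion helpers:

static void
sc2isc(struct tc_service_curve *sc, struct internal_sc *isc)
{
	isc->sm1  = m2sm(sc->m1);	/* first slope, bytes/sec to fixed point */
	isc->ism1 = m2ism(sc->m1);	/* inverse of the first slope */
	isc->dx   = d2dx(sc->d);	/* knee offset, msec to internal time */
	isc->dy   = seg_x2y(isc->dx, isc->sm1);
	isc->sm2  = m2sm(sc->m2);	/* second (long-term) slope */
	isc->ism2 = m2ism(sc->m2);
}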
926 struct hfsc_class *cl = (struct hfsc_class *)*arg;
960 if (cl != NULL) {
964 if (cl->cl_parent &&
965 cl->cl_parent->cl_common.classid != parentid)
967 if (cl->cl_parent == NULL && parentid != TC_H_ROOT)
973 err = gen_replace_estimator(&cl->bstats, NULL,
974 &cl->rate_est,
983 old_flags = cl->cl_flags;
986 hfsc_change_rsc(cl, rsc, cur_time);
988 hfsc_change_fsc(cl, fsc);
990 hfsc_change_usc(cl, usc, cur_time);
992 if (cl->qdisc->q.qlen != 0) {
993 int len = qdisc_peek_len(cl->qdisc);
995 if (cl->cl_flags & HFSC_RSC) {
997 update_ed(cl, len);
999 init_ed(cl, len);
1002 if (cl->cl_flags & HFSC_FSC) {
1004 update_vf(cl, 0, cur_time);
1006 init_vf(cl, len);
1032 cl = kzalloc(sizeof(struct hfsc_class), GFP_KERNEL);
1033 if (cl == NULL)
1036 err = tcf_block_get(&cl->block, &cl->filter_list, sch, extack);
1038 kfree(cl);
1043 err = gen_new_estimator(&cl->bstats, NULL, &cl->rate_est,
1048 tcf_block_put(cl->block);
1049 kfree(cl);
1055 hfsc_change_rsc(cl, rsc, 0);
1057 hfsc_change_fsc(cl, fsc);
1059 hfsc_change_usc(cl, usc, 0);
1061 cl->cl_common.classid = classid;
1062 cl->sched = q;
1063 cl->cl_parent = parent;
1064 cl->qdisc = qdisc_create_dflt(sch->dev_queue, &pfifo_qdisc_ops,
1066 if (cl->qdisc == NULL)
1067 cl->qdisc = &noop_qdisc;
1069 qdisc_hash_add(cl->qdisc, true);
1070 INIT_LIST_HEAD(&cl->children);
1071 cl->vt_tree = RB_ROOT;
1072 cl->cf_tree = RB_ROOT;
1081 qdisc_class_hash_insert(&q->clhash, &cl->cl_common);
1082 list_add_tail(&cl->siblings, &parent->children);
1090 *arg = (unsigned long)cl;
1095 hfsc_destroy_class(struct Qdisc *sch, struct hfsc_class *cl)
1099 tcf_block_put(cl->block);
1100 qdisc_put(cl->qdisc);
1101 gen_kill_estimator(&cl->rate_est);
1102 if (cl != &q->root)
1103 kfree(cl);
1110 struct hfsc_class *cl = (struct hfsc_class *)arg;
1112 if (cl->level > 0 || cl->filter_cnt > 0 || cl == &q->root)
1117 list_del(&cl->siblings);
1118 hfsc_adjust_levels(cl->cl_parent);
1120 qdisc_purge_queue(cl->qdisc);
1121 qdisc_class_hash_remove(&q->clhash, &cl->cl_common);
1125 hfsc_destroy_class(sch, cl);
1133 struct hfsc_class *head, *cl;
1139 (cl = hfsc_find_class(skb->priority, sch)) != NULL)
1140 if (cl->level == 0)
1141 return cl;
1158 cl = (struct hfsc_class *)res.class;
1159 if (!cl) {
1160 cl = hfsc_find_class(res.classid, sch);
1161 if (!cl)
1163 if (cl->level >= head->level)
1167 if (cl->level == 0)
1168 return cl; /* hit leaf class */
1171 tcf = rcu_dereference_bh(cl->filter_list);
1172 head = cl;
1176 cl = hfsc_find_class(TC_H_MAKE(TC_H_MAJ(sch->handle), q->defcls), sch);
1177 if (cl == NULL || cl->level > 0)
1180 return cl;
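
The filter walk that drives lines 1158 to 1172 is elided above. A sketch of its skeleton, where result is an int and res a struct tcf_result local to hfsc_classify; note the tcf_classify signature varies across kernel versions, so the call below is indicative rather than exact:

	tcf = rcu_dereference_bh(q->root.filter_list);
	while (tcf && (result = tcf_classify(skb, tcf, &res, false)) >= 0) {
#ifdef CONFIG_NET_CLS_ACT
		switch (result) {
		case TC_ACT_QUEUED:
		case TC_ACT_STOLEN:
		case TC_ACT_TRAP:
			*qerr = NET_XMIT_SUCCESS | __NET_XMIT_STOLEN;
			fallthrough;
		case TC_ACT_SHOT:
			return NULL;
		}
#endif
		/* resolve res to a class (lines 1158-1168); if it is not a
		 * leaf, descend into its filter list (lines 1171-1172) */
	}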
1187 struct hfsc_class *cl = (struct hfsc_class *)arg;
1189 if (cl->level > 0)
1193 cl->cl_common.classid, NULL);
1198 *old = qdisc_replace(sch, new, &cl->qdisc);
1205 struct hfsc_class *cl = (struct hfsc_class *)arg;
1207 if (cl->level == 0)
1208 return cl->qdisc;
1216 struct hfsc_class *cl = (struct hfsc_class *)arg;
1218 /* vttree is now handled in update_vf() so that update_vf(cl, 0, 0)
1221 update_vf(cl, 0, 0);
1222 if (cl->cl_flags & HFSC_RSC)
1223 eltree_remove(cl);
1236 struct hfsc_class *cl = hfsc_find_class(classid, sch);
1238 if (cl != NULL) {
1239 if (p != NULL && p->level <= cl->level)
1241 cl->filter_cnt++;
1244 return (unsigned long)cl;
1250 struct hfsc_class *cl = (struct hfsc_class *)arg;
1252 cl->filter_cnt--;
1259 struct hfsc_class *cl = (struct hfsc_class *)arg;
1261 if (cl == NULL)
1262 cl = &q->root;
1264 return cl->block;
1285 hfsc_dump_curves(struct sk_buff *skb, struct hfsc_class *cl)
1287 if ((cl->cl_flags & HFSC_RSC) &&
1288 (hfsc_dump_sc(skb, TCA_HFSC_RSC, &cl->cl_rsc) < 0))
1291 if ((cl->cl_flags & HFSC_FSC) &&
1292 (hfsc_dump_sc(skb, TCA_HFSC_FSC, &cl->cl_fsc) < 0))
1295 if ((cl->cl_flags & HFSC_USC) &&
1296 (hfsc_dump_sc(skb, TCA_HFSC_USC, &cl->cl_usc) < 0))
1309 struct hfsc_class *cl = (struct hfsc_class *)arg;
1312 tcm->tcm_parent = cl->cl_parent ? cl->cl_parent->cl_common.classid :
1314 tcm->tcm_handle = cl->cl_common.classid;
1315 if (cl->level == 0)
1316 tcm->tcm_info = cl->qdisc->handle;
1321 if (hfsc_dump_curves(skb, cl) < 0)
1334 struct hfsc_class *cl = (struct hfsc_class *)arg;
1338 qdisc_qstats_qlen_backlog(cl->qdisc, &qlen, &cl->qstats.backlog);
1339 xstats.level = cl->level;
1340 xstats.period = cl->cl_vtperiod;
1341 xstats.work = cl->cl_total;
1342 xstats.rtwork = cl->cl_cumul;
1344 if (gnet_stats_copy_basic(qdisc_root_sleeping_running(sch), d, NULL, &cl->bstats) < 0 ||
1345 gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
1346 gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0)
1358 struct hfsc_class *cl;
1365 hlist_for_each_entry(cl, &q->clhash.hash[i],
1371 if (arg->fn(sch, (unsigned long)cl, arg) < 0) {
1384 struct hfsc_class *cl;
1387 cl = eltree_get_minel(q);
1388 if (cl)
1389 next_time = cl->cl_e;
1459 hfsc_reset_class(struct hfsc_class *cl)
1461 cl->cl_total = 0;
1462 cl->cl_cumul = 0;
1463 cl->cl_d = 0;
1464 cl->cl_e = 0;
1465 cl->cl_vt = 0;
1466 cl->cl_vtadj = 0;
1467 cl->cl_cvtmin = 0;
1468 cl->cl_cvtoff = 0;
1469 cl->cl_vtperiod = 0;
1470 cl->cl_parentperiod = 0;
1471 cl->cl_f = 0;
1472 cl->cl_myf = 0;
1473 cl->cl_cfmin = 0;
1474 cl->cl_nactive = 0;
1476 cl->vt_tree = RB_ROOT;
1477 cl->cf_tree = RB_ROOT;
1478 qdisc_reset(cl->qdisc);
1480 if (cl->cl_flags & HFSC_RSC)
1481 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, 0, 0);
1482 if (cl->cl_flags & HFSC_FSC)
1483 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, 0, 0);
1484 if (cl->cl_flags & HFSC_USC)
1485 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, 0, 0);
1492 struct hfsc_class *cl;
1496 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode)
1497 hfsc_reset_class(cl);
1508 struct hfsc_class *cl;
1512 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) {
1513 tcf_block_put(cl->block);
1514 cl->block = NULL;
1518 hlist_for_each_entry_safe(cl, next, &q->clhash.hash[i],
1520 hfsc_destroy_class(sch, cl);
1547 struct hfsc_class *cl;
1551 cl = hfsc_classify(skb, sch, &err);
1552 if (cl == NULL) {
1559 first = !cl->qdisc->q.qlen;
1560 err = qdisc_enqueue(skb, cl->qdisc, to_free);
1563 cl->qstats.drops++;
1570 if (cl->cl_flags & HFSC_RSC)
1571 init_ed(cl, len);
1572 if (cl->cl_flags & HFSC_FSC)
1573 init_vf(cl, len);
1579 if (cl->cl_flags & HFSC_RSC)
1580 cl->qdisc->ops->peek(cl->qdisc);
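
Lines 1570 to 1580 sit inside the first-packet path of hfsc_enqueue. Reconstructed as a sketch: only the packet that makes the class backlogged (re)initializes its curves, and the closing peek pins the head so a later head drop in the child qdisc cannot invalidate the deadline computed from len:

	if (first) {
		if (cl->cl_flags & HFSC_RSC)
			init_ed(cl, len);
		if (cl->cl_flags & HFSC_FSC)
			init_vf(cl, len);
		/* isolate the head now: the deadline was computed from this
		 * packet's length, so it must be the one dequeued first */
		if (cl->cl_flags & HFSC_RSC)
			cl->qdisc->ops->peek(cl->qdisc);
	}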
1594 struct hfsc_class *cl;
1610 cl = eltree_get_mindl(q, cur_time);
1611 if (cl) {
1618 cl = vttree_get_minvt(&q->root, cur_time);
1619 if (cl == NULL) {
1626 skb = qdisc_dequeue_peeked(cl->qdisc);
1628 qdisc_warn_nonwc("HFSC", cl->qdisc);
1632 bstats_update(&cl->bstats, skb);
1633 update_vf(cl, qdisc_pkt_len(skb), cur_time);
1635 cl->cl_cumul += qdisc_pkt_len(skb);
1637 if (cl->cl_flags & HFSC_RSC) {
1638 if (cl->qdisc->q.qlen != 0) {
1640 next_len = qdisc_peek_len(cl->qdisc);
1642 update_ed(cl, next_len);
1644 update_d(cl, next_len);
1647 eltree_remove(cl);
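
The selection step elided between lines 1594 and 1626 is the heart of hfsc_dequeue: real-time criteria first (minimum deadline among eligible classes), link-sharing criteria as fallback (minimum virtual time in the hierarchy), and a watchdog when no class may send yet. A sketch, with realtime a local flag that later gates the cl_cumul update at line 1635:

	cl = eltree_get_mindl(q, cur_time);
	if (cl) {
		/* an eligible class exists: serve by minimum deadline */
		realtime = 1;
	} else {
		/* no realtime work: serve the minimum vt in the hierarchy */
		cl = vttree_get_minvt(&q->root, cur_time);
		if (cl == NULL) {
			qdisc_qstats_overlimit(sch);
			hfsc_schedule_watchdog(sch);
			return NULL;
		}
	}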