Lines in net/sched/sch_hfsc.c (the HFSC packet scheduler) matching references to the identifier cl (struct hfsc_class *):
185 eltree_insert(struct hfsc_class *cl)
187 struct rb_node **p = &cl->sched->eligible.rb_node;
194 if (cl->cl_e >= cl1->cl_e)
199 rb_link_node(&cl->el_node, parent, p);
200 rb_insert_color(&cl->el_node, &cl->sched->eligible);
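These matches are the standard <linux/rbtree.h> ordered-insert idiom, keyed on the class's eligible time cl_e; equal keys go right, so ties keep insertion order. A reconstruction of the whole function, with the non-matching lines (declarations and the loop body) filled in from context:

static void
eltree_insert(struct hfsc_class *cl)
{
	struct rb_node **p = &cl->sched->eligible.rb_node;
	struct rb_node *parent = NULL;
	struct hfsc_class *cl1;

	/* walk down to the insertion point, ordering by cl_e */
	while (*p != NULL) {
		parent = *p;
		cl1 = rb_entry(parent, struct hfsc_class, el_node);
		if (cl->cl_e >= cl1->cl_e)
			p = &parent->rb_right;
		else
			p = &parent->rb_left;
	}
	rb_link_node(&cl->el_node, parent, p);
	rb_insert_color(&cl->el_node, &cl->sched->eligible);
}

vttree_insert() (lines 250-265) and cftree_insert() (lines 319-334) below follow the same pattern, keyed on cl_vt and cl_f respectively.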
204 eltree_remove(struct hfsc_class *cl)
206 rb_erase(&cl->el_node, &cl->sched->eligible);
210 eltree_update(struct hfsc_class *cl)
212 eltree_remove(cl);
213 eltree_insert(cl);
220 struct hfsc_class *p, *cl = NULL;
227 if (cl == NULL || p->cl_d < cl->cl_d)
228 cl = p;
230 return cl;
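The fragments at lines 220-230 are from eltree_get_mindl(): scan classes in eligible-time order, stop at the first class that is not yet eligible (cl_e > cur_time), and among the eligible ones pick the minimum deadline cl_d. A reconstruction; the loop bounds and the early break are inferred from the surrounding code:

static inline struct hfsc_class *
eltree_get_mindl(struct hfsc_sched *q, u64 cur_time)
{
	struct hfsc_class *p, *cl = NULL;
	struct rb_node *n;

	for (n = rb_first(&q->eligible); n != NULL; n = rb_next(n)) {
		p = rb_entry(n, struct hfsc_class, el_node);
		if (p->cl_e > cur_time)
			break;	/* tree is ordered by cl_e: no more eligible */
		if (cl == NULL || p->cl_d < cl->cl_d)
			cl = p;
	}
	return cl;
}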
250 vttree_insert(struct hfsc_class *cl)
252 struct rb_node **p = &cl->cl_parent->vt_tree.rb_node;
259 if (cl->cl_vt >= cl1->cl_vt)
264 rb_link_node(&cl->vt_node, parent, p);
265 rb_insert_color(&cl->vt_node, &cl->cl_parent->vt_tree);
269 vttree_remove(struct hfsc_class *cl)
271 rb_erase(&cl->vt_node, &cl->cl_parent->vt_tree);
275 vttree_update(struct hfsc_class *cl)
277 vttree_remove(cl);
278 vttree_insert(cl);
282 vttree_firstfit(struct hfsc_class *cl, u64 cur_time)
287 for (n = rb_first(&cl->vt_tree); n != NULL; n = rb_next(n)) {
299 vttree_get_minvt(struct hfsc_class *cl, u64 cur_time)
302 if (cl->cl_cfmin > cur_time)
305 while (cl->level > 0) {
306 cl = vttree_firstfit(cl, cur_time);
307 if (cl == NULL)
312 if (cl->cl_parent->cl_cvtmin < cl->cl_vt)
313 cl->cl_parent->cl_cvtmin = cl->cl_vt;
315 return cl;
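vttree_firstfit() scans a parent's vt_tree in cl_vt order and returns the first child whose fit time cl_f has arrived; vttree_get_minvt() repeats that descent from the root until it reaches a leaf (level 0), raising each parent's cached minimum virtual time cl_cvtmin on the way down. Reconstructed from the fragments above:

static struct hfsc_class *
vttree_get_minvt(struct hfsc_class *cl, u64 cur_time)
{
	/* if the root class's cfmin is past cur_time, nothing fits */
	if (cl->cl_cfmin > cur_time)
		return NULL;

	while (cl->level > 0) {
		cl = vttree_firstfit(cl, cur_time);
		if (cl == NULL)
			return NULL;
		/* update parent's cl_cvtmin */
		if (cl->cl_parent->cl_cvtmin < cl->cl_vt)
			cl->cl_parent->cl_cvtmin = cl->cl_vt;
	}
	return cl;
}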
319 cftree_insert(struct hfsc_class *cl)
321 struct rb_node **p = &cl->cl_parent->cf_tree.rb_node;
328 if (cl->cl_f >= cl1->cl_f)
333 rb_link_node(&cl->cf_node, parent, p);
334 rb_insert_color(&cl->cf_node, &cl->cl_parent->cf_tree);
338 cftree_remove(struct hfsc_class *cl)
340 rb_erase(&cl->cf_node, &cl->cl_parent->cf_tree);
344 cftree_update(struct hfsc_class *cl)
346 cftree_remove(cl);
347 cftree_insert(cl);
609 init_ed(struct hfsc_class *cl, unsigned int next_len)
614 rtsc_min(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul);
621 cl->cl_eligible = cl->cl_deadline;
622 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) {
623 cl->cl_eligible.dx = 0;
624 cl->cl_eligible.dy = 0;
628 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
629 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
631 eltree_insert(cl);
635 update_ed(struct hfsc_class *cl, unsigned int next_len)
637 cl->cl_e = rtsc_y2x(&cl->cl_eligible, cl->cl_cumul);
638 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
640 eltree_update(cl);
644 update_d(struct hfsc_class *cl, unsigned int next_len)
646 cl->cl_d = rtsc_y2x(&cl->cl_deadline, cl->cl_cumul + next_len);
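init_ed()/update_ed() derive the class's eligible time cl_e and deadline cl_d by inverting the eligible and deadline service curves at the realtime work already received (cl_cumul) and at cl_cumul plus the next packet's length. As a standalone illustration of what rtsc_y2x() computes on a two-piece curve (simplified, hypothetical types; the kernel's runtime_sc additionally precomputes inverse slopes to avoid divisions):

#include <stdint.h>
#include <stdio.h>

/* an HFSC service curve: slope m1 for the first d usec, m2 after */
struct twopiece_sc {
	uint64_t start;	/* activation time, usec */
	uint64_t m1;	/* bytes per usec, first segment (> 0 here) */
	uint64_t d;	/* length of first segment, usec */
	uint64_t m2;	/* bytes per usec, second segment (> 0 here) */
};

/* invert the curve: earliest time x at which service reaches y */
static uint64_t sc_y2x(const struct twopiece_sc *sc, uint64_t y)
{
	uint64_t y_knee = sc->m1 * sc->d;	/* service at the knee */

	if (y <= y_knee)
		return sc->start + y / sc->m1;
	return sc->start + sc->d + (y - y_knee) / sc->m2;
}

int main(void)
{
	/* 10 bytes/usec burst for 100 usec, then 2 bytes/usec */
	struct twopiece_sc sc = { .start = 0, .m1 = 10, .d = 100, .m2 = 2 };

	printf("time to serve 500 bytes:  %llu usec\n",
	       (unsigned long long)sc_y2x(&sc, 500));	/* 50 */
	printf("time to serve 1500 bytes: %llu usec\n",
	       (unsigned long long)sc_y2x(&sc, 1500));	/* 350 */
	return 0;
}

The convex/concave test at lines 622-624 follows from this shape: for a convex realtime curve (sm1 <= sm2) the eligible curve is the m2 line through the origin (dx = dy = 0), so a class never becomes eligible earlier than its long-term rate allows.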
650 update_cfmin(struct hfsc_class *cl)
652 struct rb_node *n = rb_first(&cl->cf_tree);
656 cl->cl_cfmin = 0;
660 cl->cl_cfmin = p->cl_f;
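update_cfmin() caches in the class the smallest fit time among its children: the leftmost node of cf_tree, or 0 when the tree is empty. Reconstructed:

static void
update_cfmin(struct hfsc_class *cl)
{
	struct rb_node *n = rb_first(&cl->cf_tree);
	struct hfsc_class *p;

	if (n == NULL) {
		cl->cl_cfmin = 0;
		return;
	}
	p = rb_entry(n, struct hfsc_class, cf_node);
	cl->cl_cfmin = p->cl_f;
}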
664 init_vf(struct hfsc_class *cl, unsigned int len)
673 for (; cl->cl_parent != NULL; cl = cl->cl_parent) {
674 if (go_active && cl->cl_nactive++ == 0)
680 n = rb_last(&cl->cl_parent->vt_tree);
689 if (cl->cl_parent->cl_cvtmin != 0)
690 vt = (cl->cl_parent->cl_cvtmin + vt)/2;
692 if (cl->cl_parent->cl_vtperiod !=
693 cl->cl_parentperiod || vt > cl->cl_vt)
694 cl->cl_vt = vt;
702 cl->cl_vt = cl->cl_parent->cl_cvtoff;
703 cl->cl_parent->cl_cvtmin = 0;
707 rtsc_min(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
708 cl->cl_vtadj = 0;
710 cl->cl_vtperiod++; /* increment vt period */
711 cl->cl_parentperiod = cl->cl_parent->cl_vtperiod;
712 if (cl->cl_parent->cl_nactive == 0)
713 cl->cl_parentperiod++;
714 cl->cl_f = 0;
716 vttree_insert(cl);
717 cftree_insert(cl);
719 if (cl->cl_flags & HFSC_USC) {
725 rtsc_min(&cl->cl_ulimit, &cl->cl_usc, cur_time,
726 cl->cl_total);
728 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit,
729 cl->cl_total);
733 f = max(cl->cl_myf, cl->cl_cfmin);
734 if (f != cl->cl_f) {
735 cl->cl_f = f;
736 cftree_update(cl);
738 update_cfmin(cl->cl_parent);
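The heart of init_vf() is choosing a freshly activated class's virtual time: if siblings are already active, start at the midpoint of the parent's cl_cvtmin and the maximum sibling vt, never moving vt backwards within the same parent period; if the class opens a new backlog period for the parent, resume from the parent's cl_cvtoff. A reconstruction of that branch, with declarations inferred from the matches above:

	n = rb_last(&cl->cl_parent->vt_tree);
	if (n != NULL) {
		max_cl = rb_entry(n, struct hfsc_class, vt_node);
		/*
		 * set vt to the average of the min and max classes.
		 * if the parent's period didn't change, don't decrease
		 * the vt of this class.
		 */
		vt = max_cl->cl_vt;
		if (cl->cl_parent->cl_cvtmin != 0)
			vt = (cl->cl_parent->cl_cvtmin + vt)/2;

		if (cl->cl_parent->cl_vtperiod != cl->cl_parentperiod ||
		    vt > cl->cl_vt)
			cl->cl_vt = vt;
	} else {
		/* first child in a new parent backlog period */
		cl->cl_vt = cl->cl_parent->cl_cvtoff;
		cl->cl_parent->cl_cvtmin = 0;
	}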
743 update_vf(struct hfsc_class *cl, unsigned int len, u64 cur_time)
748 if (cl->qdisc->q.qlen == 0 && cl->cl_flags & HFSC_FSC)
751 for (; cl->cl_parent != NULL; cl = cl->cl_parent) {
752 cl->cl_total += len;
754 if (!(cl->cl_flags & HFSC_FSC) || cl->cl_nactive == 0)
757 if (go_passive && --cl->cl_nactive == 0)
763 cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total) + cl->cl_vtadj;
770 if (cl->cl_vt < cl->cl_parent->cl_cvtmin) {
771 cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt;
772 cl->cl_vt = cl->cl_parent->cl_cvtmin;
779 if (cl->cl_vt > cl->cl_parent->cl_cvtoff)
780 cl->cl_parent->cl_cvtoff = cl->cl_vt;
783 vttree_remove(cl);
785 cftree_remove(cl);
786 update_cfmin(cl->cl_parent);
792 vttree_update(cl);
795 if (cl->cl_flags & HFSC_USC) {
796 cl->cl_myf = rtsc_y2x(&cl->cl_ulimit, cl->cl_total);
798 cl->cl_myf = cl->cl_myfadj + rtsc_y2x(&cl->cl_ulimit,
799 cl->cl_total);
813 if (cl->cl_myf < myf_bound) {
814 delta = cur_time - cl->cl_myf;
815 cl->cl_myfadj += delta;
816 cl->cl_myf += delta;
821 f = max(cl->cl_myf, cl->cl_cfmin);
822 if (f != cl->cl_f) {
823 cl->cl_f = f;
824 cftree_update(cl);
825 update_cfmin(cl->cl_parent);
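update_vf() recomputes cl_vt from the virtual curve and then clamps it to the parent's cl_cvtmin; the shortfall is accumulated in cl_vtadj, so a class that was skipped in the past for non-fit does not lag behind its siblings' virtual clock. Reconstructed from lines 763-772:

	cl->cl_vt = rtsc_y2x(&cl->cl_virtual, cl->cl_total) + cl->cl_vtadj;

	/*
	 * if vt of the class is smaller than cvtmin, the class was
	 * skipped in the past due to non-fit; bump vtadj so the class
	 * catches up with the parent's virtual clock.
	 */
	if (cl->cl_vt < cl->cl_parent->cl_cvtmin) {
		cl->cl_vtadj += cl->cl_parent->cl_cvtmin - cl->cl_vt;
		cl->cl_vt = cl->cl_parent->cl_cvtmin;
	}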
847 hfsc_adjust_levels(struct hfsc_class *cl)
854 list_for_each_entry(p, &cl->children, siblings) {
858 cl->level = level;
859 } while ((cl = cl->cl_parent) != NULL);
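hfsc_adjust_levels() recomputes each ancestor's level as one more than the maximum level among its children, walking up to the root; it runs whenever a class is added or removed. Reconstructed:

static void
hfsc_adjust_levels(struct hfsc_class *cl)
{
	struct hfsc_class *p;
	unsigned int level;

	do {
		level = 0;
		list_for_each_entry(p, &cl->children, siblings) {
			if (p->level >= level)
				level = p->level + 1;
		}
		cl->level = level;
	} while ((cl = cl->cl_parent) != NULL);
}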
875 hfsc_change_rsc(struct hfsc_class *cl, struct tc_service_curve *rsc,
878 sc2isc(rsc, &cl->cl_rsc);
879 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, cur_time, cl->cl_cumul);
880 cl->cl_eligible = cl->cl_deadline;
881 if (cl->cl_rsc.sm1 <= cl->cl_rsc.sm2) {
882 cl->cl_eligible.dx = 0;
883 cl->cl_eligible.dy = 0;
885 cl->cl_flags |= HFSC_RSC;
889 hfsc_change_fsc(struct hfsc_class *cl, struct tc_service_curve *fsc)
891 sc2isc(fsc, &cl->cl_fsc);
892 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
893 cl->cl_flags |= HFSC_FSC;
897 hfsc_change_usc(struct hfsc_class *cl, struct tc_service_curve *usc,
900 sc2isc(usc, &cl->cl_usc);
901 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, cur_time, cl->cl_total);
902 cl->cl_flags |= HFSC_USC;
906 hfsc_upgrade_rt(struct hfsc_class *cl)
908 cl->cl_fsc = cl->cl_rsc;
909 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, cl->cl_vt, cl->cl_total);
910 cl->cl_flags |= HFSC_FSC;
925 struct hfsc_class *cl = (struct hfsc_class *)*arg;
959 if (cl != NULL) {
963 if (cl->cl_parent &&
964 cl->cl_parent->cl_common.classid != parentid)
966 if (cl->cl_parent == NULL && parentid != TC_H_ROOT)
972 err = gen_replace_estimator(&cl->bstats, NULL,
973 &cl->rate_est,
982 old_flags = cl->cl_flags;
985 hfsc_change_rsc(cl, rsc, cur_time);
987 hfsc_change_fsc(cl, fsc);
989 hfsc_change_usc(cl, usc, cur_time);
991 if (cl->qdisc->q.qlen != 0) {
992 int len = qdisc_peek_len(cl->qdisc);
994 if (cl->cl_flags & HFSC_RSC) {
996 update_ed(cl, len);
998 init_ed(cl, len);
1001 if (cl->cl_flags & HFSC_FSC) {
1003 update_vf(cl, 0, cur_time);
1005 init_vf(cl, len);
1031 cl = kzalloc(sizeof(struct hfsc_class), GFP_KERNEL);
1032 if (cl == NULL)
1035 err = tcf_block_get(&cl->block, &cl->filter_list, sch, extack);
1037 kfree(cl);
1042 err = gen_new_estimator(&cl->bstats, NULL, &cl->rate_est,
1045 tcf_block_put(cl->block);
1046 kfree(cl);
1052 hfsc_change_rsc(cl, rsc, 0);
1054 hfsc_change_fsc(cl, fsc);
1056 hfsc_change_usc(cl, usc, 0);
1058 cl->cl_common.classid = classid;
1059 cl->sched = q;
1060 cl->cl_parent = parent;
1061 cl->qdisc = qdisc_create_dflt(sch->dev_queue, &pfifo_qdisc_ops,
1063 if (cl->qdisc == NULL)
1064 cl->qdisc = &noop_qdisc;
1066 qdisc_hash_add(cl->qdisc, true);
1067 INIT_LIST_HEAD(&cl->children);
1068 cl->vt_tree = RB_ROOT;
1069 cl->cf_tree = RB_ROOT;
1078 qdisc_class_hash_insert(&q->clhash, &cl->cl_common);
1079 list_add_tail(&cl->siblings, &parent->children);
1087 *arg = (unsigned long)cl;
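Around lines 1078-1079 the new class is linked in under the qdisc tree lock; the lock pair and the parent purge do not reference cl, so they are filled in here from context as a condensed reconstruction (ordering as in current kernels):

	sch_tree_lock(sch);
	qdisc_class_hash_insert(&q->clhash, &cl->cl_common);
	list_add_tail(&cl->siblings, &parent->children);
	if (parent->level == 0)
		qdisc_purge_queue(parent->qdisc);	/* parent becomes inner */
	hfsc_adjust_levels(parent);
	sch_tree_unlock(sch);

	qdisc_class_hash_grow(sch, &q->clhash);

	*arg = (unsigned long)cl;
	return 0;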
1092 hfsc_destroy_class(struct Qdisc *sch, struct hfsc_class *cl)
1096 tcf_block_put(cl->block);
1097 qdisc_put(cl->qdisc);
1098 gen_kill_estimator(&cl->rate_est);
1099 if (cl != &q->root)
1100 kfree(cl);
1108 struct hfsc_class *cl = (struct hfsc_class *)arg;
1110 if (cl->level > 0 || qdisc_class_in_use(&cl->cl_common) ||
1111 cl == &q->root) {
1118 list_del(&cl->siblings);
1119 hfsc_adjust_levels(cl->cl_parent);
1121 qdisc_purge_queue(cl->qdisc);
1122 qdisc_class_hash_remove(&q->clhash, &cl->cl_common);
1126 hfsc_destroy_class(sch, cl);
1134 struct hfsc_class *head, *cl;
1140 (cl = hfsc_find_class(skb->priority, sch)) != NULL)
1141 if (cl->level == 0)
1142 return cl;
1159 cl = (struct hfsc_class *)res.class;
1160 if (!cl) {
1161 cl = hfsc_find_class(res.classid, sch);
1162 if (!cl)
1164 if (cl->level >= head->level)
1168 if (cl->level == 0)
1169 return cl; /* hit leaf class */
1172 tcf = rcu_dereference_bh(cl->filter_list);
1173 head = cl;
1177 cl = hfsc_find_class(TC_H_MAKE(TC_H_MAJ(sch->handle), q->defcls), sch);
1178 if (cl == NULL || cl->level > 0)
1181 return cl;
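hfsc_classify() walks filter chains from the root down: each verdict must point strictly below the current level ("filter may only point downwards"), a leaf (level 0) ends the walk, otherwise classification continues with the inner class's own filter list; if everything fails, the default class q->defcls is tried. A condensed reconstruction of the loop (the TC_ACT_* action handling is elided; tcf_classify() signature as in current kernels):

	while (tcf && (result = tcf_classify(skb, NULL, tcf, &res, false)) >= 0) {
		cl = (struct hfsc_class *)res.class;
		if (!cl) {
			cl = hfsc_find_class(res.classid, sch);
			if (!cl)
				break;	/* filter selected invalid classid */
			if (cl->level >= head->level)
				break;	/* filter may only point downwards */
		}

		if (cl->level == 0)
			return cl;	/* hit leaf class */

		/* apply inner filter chain */
		tcf = rcu_dereference_bh(cl->filter_list);
		head = cl;
	}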
1188 struct hfsc_class *cl = (struct hfsc_class *)arg;
1190 if (cl->level > 0)
1194 cl->cl_common.classid, NULL);
1199 *old = qdisc_replace(sch, new, &cl->qdisc);
1206 struct hfsc_class *cl = (struct hfsc_class *)arg;
1208 if (cl->level == 0)
1209 return cl->qdisc;
1217 struct hfsc_class *cl = (struct hfsc_class *)arg;
1219 /* vttree is now handled in update_vf() so that update_vf(cl, 0, 0)
1222 update_vf(cl, 0, 0);
1223 if (cl->cl_flags & HFSC_RSC)
1224 eltree_remove(cl);
1237 struct hfsc_class *cl = hfsc_find_class(classid, sch);
1239 if (cl != NULL) {
1240 if (p != NULL && p->level <= cl->level)
1242 qdisc_class_get(&cl->cl_common);
1245 return (unsigned long)cl;
1251 struct hfsc_class *cl = (struct hfsc_class *)arg;
1253 qdisc_class_put(&cl->cl_common);
1260 struct hfsc_class *cl = (struct hfsc_class *)arg;
1262 if (cl == NULL)
1263 cl = &q->root;
1265 return cl->block;
1286 hfsc_dump_curves(struct sk_buff *skb, struct hfsc_class *cl)
1288 if ((cl->cl_flags & HFSC_RSC) &&
1289 (hfsc_dump_sc(skb, TCA_HFSC_RSC, &cl->cl_rsc) < 0))
1292 if ((cl->cl_flags & HFSC_FSC) &&
1293 (hfsc_dump_sc(skb, TCA_HFSC_FSC, &cl->cl_fsc) < 0))
1296 if ((cl->cl_flags & HFSC_USC) &&
1297 (hfsc_dump_sc(skb, TCA_HFSC_USC, &cl->cl_usc) < 0))
1310 struct hfsc_class *cl = (struct hfsc_class *)arg;
1313 tcm->tcm_parent = cl->cl_parent ? cl->cl_parent->cl_common.classid :
1315 tcm->tcm_handle = cl->cl_common.classid;
1316 if (cl->level == 0)
1317 tcm->tcm_info = cl->qdisc->handle;
1322 if (hfsc_dump_curves(skb, cl) < 0)
1335 struct hfsc_class *cl = (struct hfsc_class *)arg;
1339 qdisc_qstats_qlen_backlog(cl->qdisc, &qlen, &cl->qstats.backlog);
1340 xstats.level = cl->level;
1341 xstats.period = cl->cl_vtperiod;
1342 xstats.work = cl->cl_total;
1343 xstats.rtwork = cl->cl_cumul;
1345 if (gnet_stats_copy_basic(d, NULL, &cl->bstats, true) < 0 ||
1346 gnet_stats_copy_rate_est(d, &cl->rate_est) < 0 ||
1347 gnet_stats_copy_queue(d, NULL, &cl->qstats, qlen) < 0)
1359 struct hfsc_class *cl;
1366 hlist_for_each_entry(cl, &q->clhash.hash[i],
1368 if (!tc_qdisc_stats_dump(sch, (unsigned long)cl, arg))
1378 struct hfsc_class *cl;
1381 cl = eltree_get_minel(q);
1382 if (cl)
1383 next_time = cl->cl_e;
1454 hfsc_reset_class(struct hfsc_class *cl)
1456 cl->cl_total = 0;
1457 cl->cl_cumul = 0;
1458 cl->cl_d = 0;
1459 cl->cl_e = 0;
1460 cl->cl_vt = 0;
1461 cl->cl_vtadj = 0;
1462 cl->cl_cvtmin = 0;
1463 cl->cl_cvtoff = 0;
1464 cl->cl_vtperiod = 0;
1465 cl->cl_parentperiod = 0;
1466 cl->cl_f = 0;
1467 cl->cl_myf = 0;
1468 cl->cl_cfmin = 0;
1469 cl->cl_nactive = 0;
1471 cl->vt_tree = RB_ROOT;
1472 cl->cf_tree = RB_ROOT;
1473 qdisc_reset(cl->qdisc);
1475 if (cl->cl_flags & HFSC_RSC)
1476 rtsc_init(&cl->cl_deadline, &cl->cl_rsc, 0, 0);
1477 if (cl->cl_flags & HFSC_FSC)
1478 rtsc_init(&cl->cl_virtual, &cl->cl_fsc, 0, 0);
1479 if (cl->cl_flags & HFSC_USC)
1480 rtsc_init(&cl->cl_ulimit, &cl->cl_usc, 0, 0);
1487 struct hfsc_class *cl;
1491 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode)
1492 hfsc_reset_class(cl);
1503 struct hfsc_class *cl;
1507 hlist_for_each_entry(cl, &q->clhash.hash[i], cl_common.hnode) {
1508 tcf_block_put(cl->block);
1509 cl->block = NULL;
1513 hlist_for_each_entry_safe(cl, next, &q->clhash.hash[i],
1515 hfsc_destroy_class(sch, cl);
1542 struct hfsc_class *cl;
1546 cl = hfsc_classify(skb, sch, &err);
1547 if (cl == NULL) {
1554 first = !cl->qdisc->q.qlen;
1555 err = qdisc_enqueue(skb, cl->qdisc, to_free);
1558 cl->qstats.drops++;
1565 if (cl->cl_flags & HFSC_RSC)
1566 init_ed(cl, len);
1567 if (cl->cl_flags & HFSC_FSC)
1568 init_vf(cl, len);
1574 if (cl->cl_flags & HFSC_RSC)
1575 cl->qdisc->ops->peek(cl->qdisc);
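In hfsc_enqueue(), a packet that makes the class's queue non-empty opens a new backlog period: init_ed() arms the realtime eligible/deadline pair and init_vf() activates the class in its ancestors' virtual-time trees. The trailing ->peek() at line 1575 caches the head packet up front, so a later peek with side effects (e.g. a child qdisc splitting GSO packets) cannot change the length the deadline was just computed from. Condensed reconstruction:

	first = !cl->qdisc->q.qlen;
	err = qdisc_enqueue(skb, cl->qdisc, to_free);
	if (unlikely(err != NET_XMIT_SUCCESS)) {
		if (net_xmit_drop_count(err)) {
			cl->qstats.drops++;
			qdisc_qstats_drop(sch);
		}
		return err;
	}

	if (first) {
		if (cl->cl_flags & HFSC_RSC)
			init_ed(cl, len);
		if (cl->cl_flags & HFSC_FSC)
			init_vf(cl, len);
		/* pin the head packet so the computed deadline stays valid */
		if (cl->cl_flags & HFSC_RSC)
			cl->qdisc->ops->peek(cl->qdisc);
	}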
1589 struct hfsc_class *cl;
1605 cl = eltree_get_mindl(q, cur_time);
1606 if (cl) {
1613 cl = vttree_get_minvt(&q->root, cur_time);
1614 if (cl == NULL) {
1621 skb = qdisc_dequeue_peeked(cl->qdisc);
1623 qdisc_warn_nonwc("HFSC", cl->qdisc);
1627 bstats_update(&cl->bstats, skb);
1628 update_vf(cl, qdisc_pkt_len(skb), cur_time);
1630 cl->cl_cumul += qdisc_pkt_len(skb);
1632 if (cl->cl_flags & HFSC_RSC) {
1633 if (cl->qdisc->q.qlen != 0) {
1635 next_len = qdisc_peek_len(cl->qdisc);
1637 update_ed(cl, next_len);
1639 update_d(cl, next_len);
1642 eltree_remove(cl);
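hfsc_dequeue() applies the two HFSC criteria in order: if any class is eligible (cl_e <= cur_time), the realtime criterion picks the minimum-deadline one via eltree_get_mindl(); otherwise the link-sharing criterion descends the hierarchy via vttree_get_minvt(), and if that also fails the watchdog is armed for the next eligible time (see lines 1378-1383). Afterwards cl_cumul grows only for realtime service, and the class either refreshes its e/d pair (lines 1633-1639) or leaves the eltree when its queue drains (line 1642). A condensed reconstruction of the selection:

	cur_time = psched_get_time();

	/* realtime criterion: minimum deadline among eligible classes */
	cl = eltree_get_mindl(q, cur_time);
	if (cl) {
		realtime = 1;
	} else {
		/* link-sharing criterion: minimum vt in the hierarchy */
		cl = vttree_get_minvt(&q->root, cur_time);
		if (cl == NULL) {
			sch->qstats.overlimits++;
			hfsc_schedule_watchdog(sch);
			return NULL;
		}
	}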