/kernel/linux/linux-5.10/net/core/

  gen_estimator.c
      46  struct gnet_stats_basic_cpu __percpu *cpu_bstats;   [member]
      69  __gnet_stats_copy_basic(e->running, b, e->cpu_bstats, e->bstats);   [in est_fetch_counters()]
     109  * @cpu_bstats: bstats per cpu
     125  struct gnet_stats_basic_cpu __percpu *cpu_bstats,   [in gen_new_estimator()]
     160  est->cpu_bstats = cpu_bstats;   [in gen_new_estimator()]
     214  * @cpu_bstats: bstats per cpu
     226  struct gnet_stats_basic_cpu __percpu *cpu_bstats,   [in gen_replace_estimator()]
     231  return gen_new_estimator(bstats, cpu_bstats, rate_est,   [in gen_replace_estimator()]
     124  gen_new_estimator(struct gnet_stats_basic_packed *bstats, struct gnet_stats_basic_cpu __percpu *cpu_bstats, struct net_rate_estimator __rcu **rate_est, spinlock_t *lock, seqcount_t *running, struct nlattr *opt)   [gen_new_estimator() argument]
     225  gen_replace_estimator(struct gnet_stats_basic_packed *bstats, struct gnet_stats_basic_cpu __percpu *cpu_bstats, struct net_rate_estimator __rcu **rate_est, spinlock_t *lock, seqcount_t *running, struct nlattr *opt)   [gen_replace_estimator() argument]

/kernel/linux/linux-6.6/net/core/

  gen_estimator.c
      46  struct gnet_stats_basic_sync __percpu *cpu_bstats;   [member]
      69  gnet_stats_add_basic(b, e->cpu_bstats, e->bstats, e->running);   [in est_fetch_counters()]
     113  * @cpu_bstats: bstats per cpu
     131  struct gnet_stats_basic_sync __percpu *cpu_bstats,   [in gen_new_estimator()]
     166  est->cpu_bstats = cpu_bstats;   [in gen_new_estimator()]
     220  * @cpu_bstats: bstats per cpu
     225  * if @cpu_bstats is NULL
     234  struct gnet_stats_basic_sync __percpu *cpu_bstats,   [in gen_replace_estimator()]
     239  return gen_new_estimator(bstats, cpu_bstats, rate_est,   [in gen_replace_estimator()]
     130  gen_new_estimator(struct gnet_stats_basic_sync *bstats, struct gnet_stats_basic_sync __percpu *cpu_bstats, struct net_rate_estimator __rcu **rate_est, spinlock_t *lock, bool running, struct nlattr *opt)   [gen_new_estimator() argument]
     233  gen_replace_estimator(struct gnet_stats_basic_sync *bstats, struct gnet_stats_basic_sync __percpu *cpu_bstats, struct net_rate_estimator __rcu **rate_est, spinlock_t *lock, bool running, struct nlattr *opt)   [gen_replace_estimator() argument]
    [all ...]

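Taken together, the gen_estimator.c hits show the call pattern for attaching a rate estimator on top of per-CPU counters. Below is a hedged sketch of that pattern against the linux-6.6 prototype above; struct foo_stats, foo_attach_estimator() and the choice of running=true are illustrative assumptions, not code from the listing.

/* Sketch only: a hypothetical object wiring its per-CPU bstats into
 * gen_new_estimator(), following the linux-6.6 prototype shown above. */
#include <linux/errno.h>
#include <linux/netdevice.h>	/* netdev_alloc_pcpu_stats() */
#include <linux/spinlock.h>
#include <net/gen_stats.h>
#include <net/netlink.h>

struct foo_stats {				/* hypothetical container */
	struct gnet_stats_basic_sync bstats;	/* non-per-CPU aggregate/fallback */
	struct gnet_stats_basic_sync __percpu *cpu_bstats;
	struct net_rate_estimator __rcu *rate_est;
	spinlock_t lock;
};

/* est_opt is assumed to be a valid TCA_RATE netlink attribute. */
static int foo_attach_estimator(struct foo_stats *f, struct nlattr *est_opt)
{
	int err;

	f->cpu_bstats = netdev_alloc_pcpu_stats(struct gnet_stats_basic_sync);
	if (!f->cpu_bstats)
		return -ENOMEM;

	gnet_stats_basic_sync_init(&f->bstats);
	spin_lock_init(&f->lock);

	/* running=true: the counters may be updated concurrently while the
	 * estimator samples them (assumption for this sketch). */
	err = gen_new_estimator(&f->bstats, f->cpu_bstats, &f->rate_est,
				&f->lock, true, est_opt);
	if (err)
		free_percpu(f->cpu_bstats);
	return err;
}

The error path mirrors the tcf_idr_create() hits further down: if the estimator cannot be created, the per-CPU memory is released again.
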
/kernel/linux/linux-6.6/include/net/

  gen_stats.h
      71  struct gnet_stats_basic_sync __percpu *cpu_bstats,
      77  struct gnet_stats_basic_sync __percpu *cpu_bstats,

  act_api.h
      39  struct gnet_stats_basic_sync __percpu *cpu_bstats;   [member]
     225  if (likely(a->cpu_bstats)) {   [in tcf_action_update_bstats()]
     226  bstats_update(this_cpu_ptr(a->cpu_bstats), skb);   [in tcf_action_update_bstats()]

  sch_generic.h
     107  struct gnet_stats_basic_sync __percpu *cpu_bstats;   [member]
     871  bstats_update(this_cpu_ptr(sch->cpu_bstats), skb);   [in qdisc_bstats_cpu_update()]
    1286  struct gnet_stats_basic_sync __percpu *cpu_bstats;   [member]
    1294  bstats_update(this_cpu_ptr(miniq->cpu_bstats), skb);   [in mini_qdisc_bstats_cpu_update()]

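The act_api.h and sch_generic.h hits show the per-packet update path in linux-6.6: pick this CPU's slot with this_cpu_ptr() and bump it with bstats_update(), with no lock or atomic on the datapath. A minimal sketch of that pattern (the helper name foo_bstats_update() is an assumption):

/* Sketch of the linux-6.6 fast path seen in tcf_action_update_bstats()
 * and qdisc_bstats_cpu_update() above. */
#include <linux/percpu.h>
#include <linux/skbuff.h>
#include <net/sch_generic.h>	/* bstats_update() */

static inline void
foo_bstats_update(struct gnet_stats_basic_sync __percpu *cpu_bstats,
		  const struct sk_buff *skb)
{
	/* Each CPU owns its own byte/packet pair, so the datapath only
	 * touches local memory; readers sum the slots later. */
	if (likely(cpu_bstats))
		bstats_update(this_cpu_ptr(cpu_bstats), skb);
}
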
/kernel/linux/linux-5.10/include/net/

  gen_stats.h
      70  struct gnet_stats_basic_cpu __percpu *cpu_bstats,
      76  struct gnet_stats_basic_cpu __percpu *cpu_bstats,

  act_api.h
      38  struct gnet_stats_basic_cpu __percpu *cpu_bstats;   [member]
     202  if (likely(a->cpu_bstats)) {   [in tcf_action_update_bstats()]
     203  bstats_cpu_update(this_cpu_ptr(a->cpu_bstats), skb);   [in tcf_action_update_bstats()]

  sch_generic.h
      93  struct gnet_stats_basic_cpu __percpu *cpu_bstats;   [member]
     871  bstats_cpu_update(this_cpu_ptr(sch->cpu_bstats), skb);   [in qdisc_bstats_cpu_update()]
    1294  struct gnet_stats_basic_cpu __percpu *cpu_bstats;   [member]
    1302  bstats_cpu_update(this_cpu_ptr(miniq->cpu_bstats), skb);   [in mini_qdisc_bstats_cpu_update()]

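In linux-5.10 the per-CPU slot is a struct gnet_stats_basic_cpu (packed counters plus a u64_stats_sync), so the same fast path goes through bstats_cpu_update() instead, as the hits above show. A hedged sketch of that flavour (foo_bstats_cpu_update() is an illustrative name):

/* Sketch of the linux-5.10 flavour of the per-packet update. */
#include <linux/percpu.h>
#include <linux/skbuff.h>
#include <net/sch_generic.h>	/* bstats_cpu_update() */

static inline void
foo_bstats_cpu_update(struct gnet_stats_basic_cpu __percpu *cpu_bstats,
		      const struct sk_buff *skb)
{
	/* bstats_cpu_update() brackets the byte/packet increments with
	 * u64_stats_update_begin()/end() on this CPU's syncp. */
	bstats_cpu_update(this_cpu_ptr(cpu_bstats), skb);
}
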
/kernel/linux/linux-5.10/net/sched/

  sch_generic.c
     871  sch->cpu_bstats =   [in qdisc_alloc()]
     873  if (!sch->cpu_bstats)   [in qdisc_alloc()]
     878  free_percpu(sch->cpu_bstats);   [in qdisc_alloc()]
     971  free_percpu(qdisc->cpu_bstats);   [in qdisc_free()]
    1476  miniqp->miniq1.cpu_bstats = qdisc->cpu_bstats;   [in mini_qdisc_pair_init()]
    1478  miniqp->miniq2.cpu_bstats = qdisc->cpu_bstats;   [in mini_qdisc_pair_init()]

  sch_mq.c
     174  qdisc->cpu_bstats,   [in mq_dump()]
     272  if (gnet_stats_copy_basic(&sch->running, d, sch->cpu_bstats,   [in mq_dump_class_stats()]

  sch_mqprio.c
     473  qdisc->cpu_bstats,   [in mqprio_dump()]
     603  qdisc->cpu_bstats,
     632  sch->cpu_bstats, &sch->bstats) < 0 ||

  act_api.c
     105  free_percpu(p->cpu_bstats);   [in free_tcf()]
     441  p->cpu_bstats = netdev_alloc_pcpu_stats(struct gnet_stats_basic_cpu);   [in tcf_idr_create()]
     442  if (!p->cpu_bstats)   [in tcf_idr_create()]
     458  err = gen_new_estimator(&p->tcfa_bstats, p->cpu_bstats,   [in tcf_idr_create()]
     475  free_percpu(p->cpu_bstats);   [in tcf_idr_create()]
    1123  if (a->cpu_bstats) {   [in tcf_action_update_stats()]
    1124  _bstats_cpu_update(this_cpu_ptr(a->cpu_bstats), bytes, packets);   [in tcf_action_update_stats()]
    1169  if (gnet_stats_copy_basic(NULL, &d, p->cpu_bstats, &p->tcfa_bstats) < 0 ||   [in tcf_action_copy_stats()]

  sch_api.c
     887  struct gnet_stats_basic_cpu __percpu *cpu_bstats = NULL;   [in tc_fill_qdisc(), local]
     941  cpu_bstats = q->cpu_bstats;   [in tc_fill_qdisc()]
     946  &d, cpu_bstats, &q->bstats) < 0 ||   [in tc_fill_qdisc()]
    1021  free_percpu(sch->cpu_bstats);   [in qdisc_clear_nolock()]
    1023  sch->cpu_bstats = NULL;   [in qdisc_clear_nolock()]
    1294  sch->cpu_bstats,   [in qdisc_create()]
    1370  sch->cpu_bstats,   [in qdisc_change()]

  act_police.c
     122  police->common.cpu_bstats,   [in tcf_police_init()]
     239  bstats_cpu_update(this_cpu_ptr(police->common.cpu_bstats), skb);   [in tcf_police_act()]

  act_sample.c
     174  bstats_cpu_update(this_cpu_ptr(s->common.cpu_bstats), skb);   [in tcf_sample_act()]

  act_skbmod.c
      35  bstats_cpu_update(this_cpu_ptr(d->common.cpu_bstats), skb);   [in tcf_skbmod_act()]

  act_ife.c
     720  bstats_cpu_update(this_cpu_ptr(ife->common.cpu_bstats), skb);   [in tcf_ife_decode()]
     808  bstats_cpu_update(this_cpu_ptr(ife->common.cpu_bstats), skb);   [in tcf_ife_encode()]

/kernel/linux/linux-6.6/net/sched/

  sch_generic.c
     948  sch->cpu_bstats =   [in qdisc_alloc()]
     950  if (!sch->cpu_bstats)   [in qdisc_alloc()]
     955  free_percpu(sch->cpu_bstats);   [in qdisc_alloc()]
    1035  free_percpu(qdisc->cpu_bstats);   [in qdisc_free()]
    1596  miniqp->miniq1.cpu_bstats = qdisc->cpu_bstats;   [in mini_qdisc_pair_init()]
    1598  miniqp->miniq2.cpu_bstats = qdisc->cpu_bstats;   [in mini_qdisc_pair_init()]

  sch_mq.c
     147  gnet_stats_add_basic(&sch->bstats, qdisc->cpu_bstats,   [in mq_dump()]
     234  if (gnet_stats_copy_basic(d, sch->cpu_bstats, &sch->bstats, true) < 0 ||   [in mq_dump_class_stats()]

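In linux-6.6, mq_dump() (and mqprio_dump() below) fold each child's per-CPU slots into a single aggregate with gnet_stats_add_basic() before the dump. A hedged sketch of that folding step, using the signature visible in these hits (the function name is an assumption):

/* Sketch: sum a per-CPU array plus its non-per-CPU fallback into *sum. */
#include <net/gen_stats.h>

static void foo_fold_bstats(struct gnet_stats_basic_sync *sum,
			    struct gnet_stats_basic_sync __percpu *cpu_bstats,
			    struct gnet_stats_basic_sync *fallback)
{
	gnet_stats_basic_sync_init(sum);
	/* "false" = the source counters are not being updated while we
	 * read them here (assumption, matching the mq_dump() hit). */
	gnet_stats_add_basic(sum, cpu_bstats, fallback, false);
}
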
  act_api.c
     124  free_percpu(p->cpu_bstats);   [in free_tcf()]
     748  p->cpu_bstats = netdev_alloc_pcpu_stats(struct gnet_stats_basic_sync);   [in tcf_idr_create()]
     749  if (!p->cpu_bstats)   [in tcf_idr_create()]
     767  err = gen_new_estimator(&p->tcfa_bstats, p->cpu_bstats,   [in tcf_idr_create()]
     784  free_percpu(p->cpu_bstats);   [in tcf_idr_create()]
    1510  if (a->cpu_bstats) {   [in tcf_action_update_stats()]
    1511  _bstats_update(this_cpu_ptr(a->cpu_bstats), bytes, packets);   [in tcf_action_update_stats()]
    1556  if (gnet_stats_copy_basic(&d, p->cpu_bstats,   [in tcf_action_copy_stats()]

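The free_tcf() and tcf_idr_create() hits also imply the lifetime rule: the estimator created over cpu_bstats has to be stopped before the per-CPU memory is released. A hedged sketch of that ordering follows; gen_kill_estimator() is the matching destructor from net/core/gen_estimator.c and is not itself in this hit list.

/* Sketch: release order for stats attached as in the earlier sketch. */
#include <linux/percpu.h>
#include <net/gen_stats.h>

static void foo_destroy_stats(struct net_rate_estimator __rcu **rate_est,
			      struct gnet_stats_basic_sync __percpu *cpu_bstats)
{
	/* Stop the periodic estimator first so it can no longer read
	 * the per-CPU counters ... */
	gen_kill_estimator(rate_est);
	/* ... then free the per-CPU memory itself. */
	free_percpu(cpu_bstats);
}
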
  sch_api.c
     916  struct gnet_stats_basic_sync __percpu *cpu_bstats = NULL;   [in tc_fill_qdisc(), local]
     970  cpu_bstats = q->cpu_bstats;   [in tc_fill_qdisc()]
     974  if (gnet_stats_copy_basic(&d, cpu_bstats, &q->bstats, true) < 0 ||   [in tc_fill_qdisc()]
    1056  free_percpu(sch->cpu_bstats);   [in qdisc_clear_nolock()]
    1058  sch->cpu_bstats = NULL;   [in qdisc_clear_nolock()]
    1339  sch->cpu_bstats,   [in qdisc_create()]
    1408  sch->cpu_bstats,   [in qdisc_change()]

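tc_fill_qdisc() hands both the per-CPU array and the aggregate to gnet_stats_copy_basic(), which sums them into the netlink stats dump. A hedged sketch of that dump step; gnet_stats_start_copy()/gnet_stats_finish_copy() and the TCA_STATS2/TCA_PAD attributes are standard kernel symbols not shown in this hit list, and the function name is an assumption.

/* Sketch: emit basic stats (per-CPU + aggregate) into a netlink dump. */
#include <linux/rtnetlink.h>	/* TCA_STATS2, TCA_PAD */
#include <linux/spinlock.h>
#include <net/gen_stats.h>

static int foo_dump_bstats(struct sk_buff *skb, spinlock_t *lock,
			   struct gnet_stats_basic_sync __percpu *cpu_bstats,
			   struct gnet_stats_basic_sync *bstats)
{
	struct gnet_dump d;

	if (gnet_stats_start_copy(skb, TCA_STATS2, lock, &d, TCA_PAD) < 0)
		return -1;

	/* The core sums the per-CPU slots plus *bstats, as in the
	 * tc_fill_qdisc() hit above; "true" = the counters may still be
	 * updated while being read. */
	if (gnet_stats_copy_basic(&d, cpu_bstats, bstats, true) < 0)
		return -1;

	return gnet_stats_finish_copy(&d);
}
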
  sch_mqprio.c
     577  gnet_stats_add_basic(&sch->bstats, qdisc->cpu_bstats,   [in mqprio_dump()]
     691  gnet_stats_add_basic(&bstats, qdisc->cpu_bstats,
     711  if (gnet_stats_copy_basic(d, sch->cpu_bstats,

  act_police.c
     116  police->common.cpu_bstats,   [in tcf_police_init()]
     257  bstats_update(this_cpu_ptr(police->common.cpu_bstats), skb);   [in tcf_police_act()]

  act_skbmod.c
      35  bstats_update(this_cpu_ptr(d->common.cpu_bstats), skb);   [in tcf_skbmod_act()]

  act_ife.c
     721  bstats_update(this_cpu_ptr(ife->common.cpu_bstats), skb);   [in tcf_ife_decode()]
     809  bstats_update(this_cpu_ptr(ife->common.cpu_bstats), skb);   [in tcf_ife_encode()]