Lines Matching defs:mrp
55 struct br_mrp *mrp;
57 hlist_for_each_entry_rcu(mrp, &br->mrp_list, list,
59 if (mrp->ring_id == ring_id) {
60 res = mrp;
71 struct br_mrp *mrp;
73 hlist_for_each_entry_rcu(mrp, &br->mrp_list, list,
75 if (mrp->in_id == in_id) {
76 res = mrp;
86 struct br_mrp *mrp;
88 hlist_for_each_entry_rcu(mrp, &br->mrp_list, list,
92 p = rtnl_dereference(mrp->p_port);
96 p = rtnl_dereference(mrp->s_port);
100 p = rtnl_dereference(mrp->i_port);
112 struct br_mrp *mrp;
114 hlist_for_each_entry_rcu(mrp, &br->mrp_list, list,
116 if (rcu_access_pointer(mrp->p_port) == p ||
117 rcu_access_pointer(mrp->s_port) == p ||
118 rcu_access_pointer(mrp->i_port) == p) {
119 res = mrp;
127 static int br_mrp_next_seq(struct br_mrp *mrp)
129 mrp->seq_id++;
130 return mrp->seq_id;
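
The matches above (lines 55-130) come from the RCU-protected lookup helpers that walk br->mrp_list by ring id, interconnect id, or port, plus the sequence-number helper used when building frames. A minimal sketch of the same lookup pattern follows, using a hypothetical struct foo / foo_find_id rather than the bridge's own types; the fourth argument of hlist_for_each_entry_rcu() is truncated in the listing, so a lockdep_rtnl_is_held() condition is assumed here, making the walk legal both under rcu_read_lock() and under RTNL.

#include <linux/types.h>
#include <linux/rculist.h>
#include <linux/rtnetlink.h>

struct foo {
	struct hlist_node list;
	u32 id;
	struct rcu_head rcu;
};

static struct foo *foo_find_id(struct hlist_head *head, u32 id)
{
	struct foo *res = NULL;
	struct foo *f;

	/* Readers may hold either rcu_read_lock() or RTNL (assumed). */
	hlist_for_each_entry_rcu(f, head, list, lockdep_rtnl_is_held()) {
		if (f->id == id) {
			res = f;
			break;
		}
	}

	return res;
}
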
171 static void br_mrp_skb_common(struct sk_buff *skb, struct br_mrp *mrp)
178 hdr->seq_id = cpu_to_be16(br_mrp_next_seq(mrp));
182 static struct sk_buff *br_mrp_alloc_test_skb(struct br_mrp *mrp,
199 hdr->prio = cpu_to_be16(mrp->prio);
202 hdr->state = cpu_to_be16(mrp->ring_state);
203 hdr->transitions = cpu_to_be16(mrp->ring_transitions);
206 br_mrp_skb_common(skb, mrp);
212 if (mrp->ring_role == BR_MRP_RING_ROLE_MRA) {
239 static struct sk_buff *br_mrp_alloc_in_test_skb(struct br_mrp *mrp,
256 hdr->id = cpu_to_be16(mrp->in_id);
259 hdr->state = cpu_to_be16(mrp->in_state);
260 hdr->transitions = cpu_to_be16(mrp->in_transitions);
263 br_mrp_skb_common(skb, mrp);
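
Lines 171-263 above are from br_mrp_skb_common() and the MRP_Test / MRP_InTest frame builders: every multi-byte field (prio, ring or interconnect state, transitions, sequence id) is written big endian via cpu_to_be16(). A small illustrative sketch of that conversion follows; the header layout here is made up, since the real MRP TLV layout is not visible in the matched lines.

#include <linux/types.h>
#include <asm/byteorder.h>

/* Hypothetical on-wire header; __be16 marks fields that must be converted
 * with cpu_to_be16() before being written into the frame.
 */
struct foo_test_hdr {
	__be16 prio;
	__be16 state;
	__be16 transitions;
	__be16 seq_id;
} __packed;

static void foo_fill_test_hdr(struct foo_test_hdr *hdr, u16 prio, u16 state,
			      u16 transitions, u16 seq_id)
{
	hdr->prio = cpu_to_be16(prio);
	hdr->state = cpu_to_be16(state);
	hdr->transitions = cpu_to_be16(transitions);
	hdr->seq_id = cpu_to_be16(seq_id);
}
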
282 struct br_mrp *mrp = container_of(del_work, struct br_mrp, test_work);
287 if (time_before_eq(mrp->test_end, jiffies))
290 if (mrp->test_count_miss < mrp->test_max_miss) {
291 mrp->test_count_miss++;
302 if (mrp->ring_state == BR_MRP_RING_STATE_CLOSED ||
303 mrp->test_monitor)
309 p = rcu_dereference(mrp->p_port);
311 if (!mrp->test_monitor) {
312 skb = br_mrp_alloc_test_skb(mrp, p,
321 if (notify_open && !mrp->ring_role_offloaded)
325 p = rcu_dereference(mrp->s_port);
327 if (!mrp->test_monitor) {
328 skb = br_mrp_alloc_test_skb(mrp, p,
337 if (notify_open && !mrp->ring_role_offloaded)
344 queue_delayed_work(system_wq, &mrp->test_work,
345 usecs_to_jiffies(mrp->test_interval));
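
Lines 282-345 are from the MRP_Test worker: a delayed work item recovered with container_of(), a jiffies deadline check against test_end, a miss counter bounded by test_max_miss, and a re-arm through queue_delayed_work() with usecs_to_jiffies(). A stripped-down sketch of that self-rearming worker, again with a hypothetical struct foo; the frame building and the ring-open notification are elided.

#include <linux/types.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>

struct foo {
	struct delayed_work test_work;
	unsigned long test_end;		/* jiffies value when testing stops */
	u32 test_interval;		/* re-arm interval, in microseconds */
	u32 test_max_miss;
	u32 test_count_miss;
};

static void foo_test_work_expired(struct work_struct *work)
{
	struct delayed_work *del_work = to_delayed_work(work);
	struct foo *f = container_of(del_work, struct foo, test_work);

	/* Stop once the configured test period has elapsed. */
	if (time_before_eq(f->test_end, jiffies))
		return;

	/* Count a miss; the receive path resets test_count_miss whenever a
	 * test frame makes it back around the ring.
	 */
	if (f->test_count_miss < f->test_max_miss)
		f->test_count_miss++;

	/* ... build and send the next test frame here ... */

	/* Re-arm for the next interval. */
	queue_delayed_work(system_wq, &f->test_work,
			   usecs_to_jiffies(f->test_interval));
}
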
355 struct br_mrp *mrp = container_of(del_work, struct br_mrp, in_test_work);
360 if (time_before_eq(mrp->in_test_end, jiffies))
363 if (mrp->in_test_count_miss < mrp->in_test_max_miss) {
364 mrp->in_test_count_miss++;
370 if (mrp->in_state == BR_MRP_IN_STATE_CLOSED)
376 p = rcu_dereference(mrp->p_port);
378 skb = br_mrp_alloc_in_test_skb(mrp, p,
386 if (notify_open && !mrp->in_role_offloaded)
390 p = rcu_dereference(mrp->s_port);
392 skb = br_mrp_alloc_in_test_skb(mrp, p,
400 if (notify_open && !mrp->in_role_offloaded)
404 p = rcu_dereference(mrp->i_port);
406 skb = br_mrp_alloc_in_test_skb(mrp, p,
414 if (notify_open && !mrp->in_role_offloaded)
421 queue_delayed_work(system_wq, &mrp->in_test_work,
422 usecs_to_jiffies(mrp->in_test_interval));
428 static void br_mrp_del_impl(struct net_bridge *br, struct br_mrp *mrp)
434 cancel_delayed_work_sync(&mrp->test_work);
435 br_mrp_switchdev_send_ring_test(br, mrp, 0, 0, 0, 0);
438 cancel_delayed_work_sync(&mrp->in_test_work);
439 br_mrp_switchdev_send_in_test(br, mrp, 0, 0, 0);
442 br_mrp_switchdev_set_ring_role(br, mrp, BR_MRP_RING_ROLE_DISABLED);
443 p = rtnl_dereference(mrp->i_port);
445 br_mrp_switchdev_set_in_role(br, mrp, mrp->in_id, mrp->ring_id,
448 br_mrp_switchdev_del(br, mrp);
451 p = rtnl_dereference(mrp->p_port);
460 rcu_assign_pointer(mrp->p_port, NULL);
463 p = rtnl_dereference(mrp->s_port);
472 rcu_assign_pointer(mrp->s_port, NULL);
475 p = rtnl_dereference(mrp->i_port);
484 rcu_assign_pointer(mrp->i_port, NULL);
487 hlist_del_rcu(&mrp->list);
488 kfree_rcu(mrp, rcu);
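
Lines 428-488 come from br_mrp_del_impl(), which tears an instance down in a fixed order: cancel both workers, tell the hardware (switchdev) to stop generating test frames, disable the roles, detach the ring and interconnect ports, and only then unlink the instance with hlist_del_rcu() and free it with kfree_rcu(), so concurrent RCU readers never dereference freed memory. A compact sketch of that ordering with hypothetical types:

#include <linux/rculist.h>
#include <linux/workqueue.h>
#include <linux/slab.h>

struct foo_port;

struct foo {
	struct hlist_node list;
	struct delayed_work test_work;
	struct foo_port __rcu *p_port;
	struct rcu_head rcu;
};

static void foo_del_impl(struct foo *f)
{
	/* 1. No worker may run, or re-arm itself, past this point. */
	cancel_delayed_work_sync(&f->test_work);

	/* 2. Detach the port; readers that already loaded the old pointer
	 *    keep seeing valid memory until the grace period below.
	 */
	rcu_assign_pointer(f->p_port, NULL);

	/* 3. Unlink from the list, then free only after a grace period. */
	hlist_del_rcu(&f->list);
	kfree_rcu(f, rcu);
}
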
500 struct br_mrp *mrp;
506 mrp = br_mrp_find_id(br, instance->ring_id);
507 if (mrp)
519 mrp = kzalloc(sizeof(*mrp), GFP_KERNEL);
520 if (!mrp)
523 mrp->ring_id = instance->ring_id;
524 mrp->prio = instance->prio;
531 rcu_assign_pointer(mrp->p_port, p);
538 rcu_assign_pointer(mrp->s_port, p);
543 INIT_DELAYED_WORK(&mrp->test_work, br_mrp_test_work_expired);
544 INIT_DELAYED_WORK(&mrp->in_test_work, br_mrp_in_test_work_expired);
545 hlist_add_tail_rcu(&mrp->list, &br->mrp_list);
547 err = br_mrp_switchdev_add(br, mrp);
554 br_mrp_del_impl(br, mrp);
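
Lines 500-554 are from the instance-creation path: reject a duplicate ring_id, kzalloc() the instance, record ring_id and prio, publish the ring ports with rcu_assign_pointer(), initialise both delayed works, and only then make the instance visible with hlist_add_tail_rcu(); if the switchdev add fails afterwards, br_mrp_del_impl() unwinds it. A minimal sketch of the allocate-initialise-publish order (ports and switchdev handling elided):

#include <linux/types.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/rculist.h>
#include <linux/workqueue.h>

struct foo {
	struct hlist_node list;
	struct delayed_work test_work;
	u32 ring_id;
	struct rcu_head rcu;
};

static void foo_test_work_expired(struct work_struct *work) { }

static int foo_add(struct hlist_head *head, u32 ring_id)
{
	struct foo *f;

	f = kzalloc(sizeof(*f), GFP_KERNEL);
	if (!f)
		return -ENOMEM;

	/* Fully initialise the object before it becomes visible ... */
	f->ring_id = ring_id;
	INIT_DELAYED_WORK(&f->test_work, foo_test_work_expired);

	/* ... and only then publish it to RCU readers. */
	hlist_add_tail_rcu(&f->list, head);

	return 0;
}
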
564 struct br_mrp *mrp = br_mrp_find_port(br, p);
567 if (!mrp)
570 br_mrp_del_impl(br, mrp);
578 struct br_mrp *mrp = br_mrp_find_id(br, instance->ring_id);
580 if (!mrp)
583 br_mrp_del_impl(br, mrp);
620 struct br_mrp *mrp;
625 mrp = br_mrp_find_port(p->br, p);
627 if (!mrp)
632 rcu_assign_pointer(mrp->p_port, p);
635 rcu_assign_pointer(mrp->s_port, p);
652 struct br_mrp *mrp = br_mrp_find_id(br, state->ring_id);
654 if (!mrp)
657 if (mrp->ring_state != state->ring_state)
658 mrp->ring_transitions++;
660 mrp->ring_state = state->ring_state;
662 br_mrp_switchdev_set_ring_state(br, mrp, state->ring_state);
674 struct br_mrp *mrp = br_mrp_find_id(br, role->ring_id);
677 if (!mrp)
680 mrp->ring_role = role->ring_role;
683 support = br_mrp_switchdev_set_ring_role(br, mrp, role->ring_role);
693 mrp->ring_role_offloaded = support == BR_MRP_SW ? 0 : 1;
705 struct br_mrp *mrp = br_mrp_find_id(br, test->ring_id);
708 if (!mrp)
714 support = br_mrp_switchdev_send_ring_test(br, mrp, test->interval,
723 mrp->test_interval = test->interval;
724 mrp->test_end = jiffies + usecs_to_jiffies(test->period);
725 mrp->test_max_miss = test->max_miss;
726 mrp->test_monitor = test->monitor;
727 mrp->test_count_miss = 0;
728 queue_delayed_work(system_wq, &mrp->test_work,
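
Lines 705-728 are from the MRP_Test start path: the caller-supplied interval, period and max_miss are recorded, the period becomes an absolute jiffies deadline, the miss counter is reset, and the worker shown at lines 282-345 is queued. The delay argument of that queue_delayed_work() call is cut off in the listing; the sketch below assumes the first run is scheduled one interval out.

#include <linux/types.h>
#include <linux/jiffies.h>
#include <linux/workqueue.h>

struct foo {
	struct delayed_work test_work;
	unsigned long test_end;
	u32 test_interval;
	u32 test_max_miss;
	u32 test_count_miss;
};

static void foo_start_test(struct foo *f, u32 interval_us, u32 period_us,
			   u32 max_miss)
{
	f->test_interval = interval_us;
	f->test_end = jiffies + usecs_to_jiffies(period_us);
	f->test_max_miss = max_miss;
	f->test_count_miss = 0;
	/* First expiry after one interval is an assumption; the real delay
	 * argument is truncated in the matched lines.
	 */
	queue_delayed_work(system_wq, &f->test_work,
			   usecs_to_jiffies(f->test_interval));
}
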
739 struct br_mrp *mrp = br_mrp_find_in_id(br, state->in_id);
741 if (!mrp)
744 if (mrp->in_state != state->in_state)
745 mrp->in_transitions++;
747 mrp->in_state = state->in_state;
749 br_mrp_switchdev_set_in_state(br, mrp, state->in_state);
760 struct br_mrp *mrp = br_mrp_find_id(br, role->ring_id);
764 if (!mrp)
774 p = rtnl_dereference(mrp->i_port);
779 cancel_delayed_work_sync(&mrp->in_test_work);
780 br_mrp_switchdev_send_in_test(br, mrp, 0, 0, 0);
790 rcu_assign_pointer(mrp->i_port, NULL);
792 mrp->in_role = role->in_role;
793 mrp->in_id = 0;
802 /* It is not allowed to set a different interconnect port if the mrp
806 if (rcu_access_pointer(mrp->i_port))
814 rcu_assign_pointer(mrp->i_port, p);
816 mrp->in_role = role->in_role;
817 mrp->in_id = role->in_id;
820 support = br_mrp_switchdev_set_in_role(br, mrp, role->in_id,
831 mrp->in_role_offloaded = support == BR_MRP_SW ? 0 : 1;
843 struct br_mrp *mrp = br_mrp_find_in_id(br, in_test->in_id);
846 if (!mrp)
849 if (mrp->in_role != BR_MRP_IN_ROLE_MIM)
855 support = br_mrp_switchdev_send_in_test(br, mrp, in_test->interval,
864 mrp->in_test_interval = in_test->interval;
865 mrp->in_test_end = jiffies + usecs_to_jiffies(in_test->period);
866 mrp->in_test_max_miss = in_test->max_miss;
867 mrp->in_test_count_miss = 0;
868 queue_delayed_work(system_wq, &mrp->in_test_work,
918 static void br_mrp_mrm_process(struct br_mrp *mrp, struct net_bridge_port *port,
934 mrp->test_count_miss = 0;
939 if (mrp->ring_state != BR_MRP_RING_STATE_CLOSED)
944 static bool br_mrp_test_better_than_own(struct br_mrp *mrp,
950 if (prio < mrp->prio ||
951 (prio == mrp->prio &&
962 static void br_mrp_mra_process(struct br_mrp *mrp, struct net_bridge *br,
990 if (br_mrp_test_better_than_own(mrp, br, test_hdr))
991 mrp->test_count_miss = 0;
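
Lines 944-991 implement the MRA arbitration: an MRA node keeps acting as ring manager until it receives an MRP_Test frame from a manager that is better than itself, where a numerically lower prio wins; the tie-break that follows the truncated "prio == mrp->prio &&" condition is not visible in the listing, and comparing the sender MAC address against the bridge's own address is assumed in the sketch below.

#include <linux/types.h>
#include <linux/etherdevice.h>

/* Returns true when the remote manager should win: lower prio wins; the
 * MAC-address tie-break on equal prio is an assumption (that part of the
 * condition is cut off in the matched lines).
 */
static bool foo_remote_better_than_own(u16 remote_prio, const u8 *remote_mac,
				       u16 own_prio, const u8 *own_mac)
{
	if (remote_prio < own_prio)
		return true;

	return remote_prio == own_prio &&
	       ether_addr_to_u64(remote_mac) < ether_addr_to_u64(own_mac);
}
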
998 static bool br_mrp_mim_process(struct br_mrp *mrp, struct net_bridge_port *port,
1020 if (mrp->in_id != ntohs(in_hdr->id))
1023 mrp->in_test_count_miss = 0;
1028 if (mrp->in_state != BR_MRP_IN_STATE_CLOSED)
1052 static bool br_mrp_mrm_behaviour(struct br_mrp *mrp)
1054 if (mrp->ring_role == BR_MRP_RING_ROLE_MRM ||
1055 (mrp->ring_role == BR_MRP_RING_ROLE_MRA && !mrp->test_monitor))
1061 static bool br_mrp_mrc_behaviour(struct br_mrp *mrp)
1063 if (mrp->ring_role == BR_MRP_RING_ROLE_MRC ||
1064 (mrp->ring_role == BR_MRP_RING_ROLE_MRA && mrp->test_monitor))
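
Lines 1052-1064 encode how an MRA node splits its behaviour: with test_monitor clear it behaves like a manager (MRM), with test_monitor set it only observes the ring tests and otherwise behaves like a client (MRC); plain MRM and MRC keep their behaviour unconditionally. The receive path in the lines that follow uses exactly this split to decide whether a frame is terminated locally or forwarded. A self-contained restatement of the two predicates with hypothetical names:

#include <linux/types.h>

enum foo_ring_role {
	FOO_RING_ROLE_DISABLED,
	FOO_RING_ROLE_MRC,	/* Media Redundancy Client */
	FOO_RING_ROLE_MRM,	/* Media Redundancy Manager */
	FOO_RING_ROLE_MRA,	/* Media Redundancy Automanager */
};

struct foo {
	enum foo_ring_role ring_role;
	bool test_monitor;
};

/* Manager behaviour: a real MRM, or an MRA currently acting as manager. */
static bool foo_mrm_behaviour(const struct foo *f)
{
	return f->ring_role == FOO_RING_ROLE_MRM ||
	       (f->ring_role == FOO_RING_ROLE_MRA && !f->test_monitor);
}

/* Client behaviour: a real MRC, or an MRA that only monitors ring tests. */
static bool foo_mrc_behaviour(const struct foo *f)
{
	return f->ring_role == FOO_RING_ROLE_MRC ||
	       (f->ring_role == FOO_RING_ROLE_MRA && f->test_monitor);
}
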
1070 /* This will just forward the frame to the other mrp ring ports, depending on
1080 struct br_mrp *mrp;
1087 mrp = br_mrp_find_port(br, p);
1088 if (unlikely(!mrp))
1091 p_port = rcu_dereference(mrp->p_port);
1096 s_port = rcu_dereference(mrp->s_port);
1106 if (mrp->ring_role == BR_MRP_RING_ROLE_MRM) {
1107 br_mrp_mrm_process(mrp, p, skb);
1114 if (mrp->ring_role == BR_MRP_RING_ROLE_MRA) {
1115 if (!mrp->test_monitor) {
1116 br_mrp_mrm_process(mrp, p, skb);
1120 br_mrp_mra_process(mrp, br, p, skb);
1129 i_port = rcu_dereference(mrp->i_port);
1144 if (br_mrp_mrm_behaviour(mrp) &&
1156 if (br_mrp_mrc_behaviour(mrp) &&
1157 mrp->in_role == BR_MRP_IN_ROLE_DISABLED)
1160 if (mrp->in_role == BR_MRP_IN_ROLE_MIM) {
1165 if (br_mrp_mim_process(mrp, p, skb)) {
1190 if (mrp->in_role == BR_MRP_IN_ROLE_MIC) {