Lines Matching defs:rxr
726 struct bnxt_rx_ring_info *rxr,
732 page = page_pool_dev_alloc_pages(rxr->page_pool);
739 page_pool_recycle_direct(rxr->page_pool, page);
767 int bnxt_alloc_rx_data(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
770 struct rx_bd *rxbd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
771 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[prod];
776 __bnxt_alloc_rx_page(bp, &mapping, rxr, gfp);
798 void bnxt_reuse_rx_data(struct bnxt_rx_ring_info *rxr, u16 cons, void *data)
800 u16 prod = rxr->rx_prod;
804 prod_rx_buf = &rxr->rx_buf_ring[prod];
805 cons_rx_buf = &rxr->rx_buf_ring[cons];
812 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
813 cons_bd = &rxr->rx_desc_ring[RX_RING(cons)][RX_IDX(cons)];
818 static inline u16 bnxt_find_next_agg_idx(struct bnxt_rx_ring_info *rxr, u16 idx)
820 u16 next, max = rxr->rx_agg_bmap_size;
822 next = find_next_zero_bit(rxr->rx_agg_bmap, max, idx);
824 next = find_first_zero_bit(rxr->rx_agg_bmap, max);
829 struct bnxt_rx_ring_info *rxr,
833 &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)];
838 u16 sw_prod = rxr->rx_sw_agg_prod;
842 page = rxr->rx_page;
847 rxr->rx_page = page;
848 rxr->rx_page_offset = 0;
850 offset = rxr->rx_page_offset;
851 rxr->rx_page_offset += BNXT_RX_PAGE_SIZE;
852 if (rxr->rx_page_offset == PAGE_SIZE)
853 rxr->rx_page = NULL;
870 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap)))
871 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod);
873 __set_bit(sw_prod, rxr->rx_agg_bmap);
874 rx_agg_buf = &rxr->rx_agg_ring[sw_prod];
875 rxr->rx_sw_agg_prod = NEXT_RX_AGG(sw_prod);
898 struct bnxt_rx_ring_info *rxr,
901 struct bnxt_tpa_info *tpa_info = &rxr->rx_tpa[agg_id];
911 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
912 u16 prod = rxr->rx_agg_prod;
913 u16 sw_prod = rxr->rx_sw_agg_prod;
928 agg = bnxt_get_tpa_agg_p5(bp, rxr, idx, start + i);
932 __clear_bit(cons, rxr->rx_agg_bmap);
934 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap)))
935 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod);
937 __set_bit(sw_prod, rxr->rx_agg_bmap);
938 prod_rx_buf = &rxr->rx_agg_ring[sw_prod];
939 cons_rx_buf = &rxr->rx_agg_ring[cons];
951 prod_bd = &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)];
959 rxr->rx_agg_prod = prod;
960 rxr->rx_sw_agg_prod = sw_prod;
964 struct bnxt_rx_ring_info *rxr,
973 u16 prod = rxr->rx_prod;
977 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC);
979 bnxt_reuse_rx_data(rxr, cons, data);
985 page_pool_release_page(rxr->page_pool, page);
990 skb = napi_alloc_skb(&rxr->bnapi->napi, payload);
1011 struct bnxt_rx_ring_info *rxr, u16 cons,
1016 u16 prod = rxr->rx_prod;
1020 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC);
1022 bnxt_reuse_rx_data(rxr, cons, data);
1046 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1047 u16 prod = rxr->rx_agg_prod;
1062 agg = bnxt_get_tpa_agg_p5(bp, rxr, idx, i);
1069 cons_rx_buf = &rxr->rx_agg_ring[cons];
1072 __clear_bit(cons, rxr->rx_agg_bmap);
1082 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_ATOMIC) != 0) {
1097 rxr->rx_agg_prod = prod;
1112 rxr->rx_agg_prod = prod;
1203 static void bnxt_sched_reset(struct bnxt *bp, struct bnxt_rx_ring_info *rxr)
1205 if (!rxr->bnapi->in_reset) {
1206 rxr->bnapi->in_reset = true;
1213 rxr->rx_next_cons = 0xffff;
1216 static u16 bnxt_alloc_agg_idx(struct bnxt_rx_ring_info *rxr, u16 agg_id)
1218 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
1229 static void bnxt_free_agg_idx(struct bnxt_rx_ring_info *rxr, u16 idx)
1231 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
1236 static u16 bnxt_lookup_agg_idx(struct bnxt_rx_ring_info *rxr, u16 agg_id)
1238 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
1243 static void bnxt_tpa_start(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
1255 agg_id = bnxt_alloc_agg_idx(rxr, agg_id);
1260 prod = rxr->rx_prod;
1261 cons_rx_buf = &rxr->rx_buf_ring[cons];
1262 prod_rx_buf = &rxr->rx_buf_ring[prod];
1263 tpa_info = &rxr->rx_tpa[agg_id];
1265 if (unlikely(cons != rxr->rx_next_cons ||
1268 cons, rxr->rx_next_cons,
1270 bnxt_sched_reset(bp, rxr);
1283 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
1315 rxr->rx_prod = NEXT_RX(prod);
1317 rxr->rx_next_cons = NEXT_RX(cons);
1318 cons_rx_buf = &rxr->rx_buf_ring[cons];
1320 bnxt_reuse_rx_data(rxr, cons, cons_rx_buf->data);
1321 rxr->rx_prod = NEXT_RX(rxr->rx_prod);
1546 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1566 agg_id = bnxt_lookup_agg_idx(rxr, agg_id);
1568 tpa_info = &rxr->rx_tpa[agg_id];
1576 bnxt_free_agg_idx(rxr, agg_id);
1582 tpa_info = &rxr->rx_tpa[agg_id];
1682 static void bnxt_tpa_agg(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
1688 agg_id = bnxt_lookup_agg_idx(rxr, agg_id);
1689 tpa_info = &rxr->rx_tpa[agg_id];
1717 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1738 bnxt_tpa_agg(bp, rxr, (struct rx_agg_cmp *)rxcmp);
1754 prod = rxr->rx_prod;
1757 bnxt_tpa_start(bp, rxr, (struct rx_tpa_start_cmp *)rxcmp,
1781 if (unlikely(cons != rxr->rx_next_cons)) {
1785 if (rxr->rx_next_cons != 0xffff)
1787 cons, rxr->rx_next_cons);
1788 bnxt_sched_reset(bp, rxr);
1793 rx_buf = &rxr->rx_buf_ring[cons];
1814 bnxt_reuse_rx_data(rxr, cons, data);
1826 bnxt_sched_reset(bp, rxr);
1835 if (bnxt_rx_xdp(bp, rxr, cons, data, &data_ptr, &len, event)) {
1842 bnxt_reuse_rx_data(rxr, cons, data);
1857 skb = bp->rx_skb_func(bp, rxr, cons, data, data_ptr, dma_addr,
1923 rxr->rx_prod = NEXT_RX(prod);
1924 rxr->rx_next_cons = NEXT_RX(cons);
2150 struct bnxt_rx_ring_info *rxr;
2167 rxr = bp->bnapi[grp_idx]->rx_ring;
2168 bnxt_sched_reset(bp, rxr);
2370 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
2373 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
2374 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
2402 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
2459 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
2462 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
2677 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
2684 if (!rxr->rx_tpa)
2688 struct bnxt_tpa_info *tpa_info = &rxr->rx_tpa[i];
2704 if (!rxr->rx_buf_ring)
2708 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[i];
2721 page_pool_recycle_direct(rxr->page_pool, data);
2731 if (!rxr->rx_agg_ring)
2735 struct bnxt_sw_rx_agg_bd *rx_agg_buf = &rxr->rx_agg_ring[i];
2746 __clear_bit(i, rxr->rx_agg_bmap);
2752 if (rxr->rx_page) {
2753 __free_page(rxr->rx_page);
2754 rxr->rx_page = NULL;
2756 map = rxr->rx_tpa_idx_map;
2865 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
2867 kfree(rxr->rx_tpa_idx_map);
2868 rxr->rx_tpa_idx_map = NULL;
2869 if (rxr->rx_tpa) {
2871 kfree(rxr->rx_tpa[j].agg_arr);
2872 rxr->rx_tpa[j].agg_arr = NULL;
2875 kfree(rxr->rx_tpa);
2876 rxr->rx_tpa = NULL;
2892 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
2895 rxr->rx_tpa = kcalloc(bp->max_tpa, sizeof(struct bnxt_tpa_info),
2897 if (!rxr->rx_tpa)
2906 rxr->rx_tpa[j].agg_arr = agg;
2908 rxr->rx_tpa_idx_map = kzalloc(sizeof(*rxr->rx_tpa_idx_map),
2910 if (!rxr->rx_tpa_idx_map)
2925 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
2928 if (rxr->xdp_prog)
2929 bpf_prog_put(rxr->xdp_prog);
2931 if (xdp_rxq_info_is_reg(&rxr->xdp_rxq))
2932 xdp_rxq_info_unreg(&rxr->xdp_rxq);
2934 page_pool_destroy(rxr->page_pool);
2935 rxr->page_pool = NULL;
2937 kfree(rxr->rx_agg_bmap);
2938 rxr->rx_agg_bmap = NULL;
2940 ring = &rxr->rx_ring_struct;
2943 ring = &rxr->rx_agg_ring_struct;
2949 struct bnxt_rx_ring_info *rxr)
2958 rxr->page_pool = page_pool_create(&pp);
2959 if (IS_ERR(rxr->page_pool)) {
2960 int err = PTR_ERR(rxr->page_pool);
2962 rxr->page_pool = NULL;
2979 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
2982 ring = &rxr->rx_ring_struct;
2984 rc = bnxt_alloc_rx_page_pool(bp, rxr);
2988 rc = xdp_rxq_info_reg(&rxr->xdp_rxq, bp->dev, i);
2992 rc = xdp_rxq_info_reg_mem_model(&rxr->xdp_rxq,
2994 rxr->page_pool);
2996 xdp_rxq_info_unreg(&rxr->xdp_rxq);
3008 ring = &rxr->rx_agg_ring_struct;
3014 rxr->rx_agg_bmap_size = bp->rx_agg_ring_mask + 1;
3015 mem_size = rxr->rx_agg_bmap_size / 8;
3016 rxr->rx_agg_bmap = kzalloc(mem_size, GFP_KERNEL);
3017 if (!rxr->rx_agg_bmap)
3233 struct bnxt_rx_ring_info *rxr;
3249 rxr = bnapi->rx_ring;
3250 if (!rxr)
3253 ring = &rxr->rx_ring_struct;
3257 rmem->pg_arr = (void **)rxr->rx_desc_ring;
3258 rmem->dma_arr = rxr->rx_desc_mapping;
3260 rmem->vmem = (void **)&rxr->rx_buf_ring;
3262 ring = &rxr->rx_agg_ring_struct;
3266 rmem->pg_arr = (void **)rxr->rx_agg_desc_ring;
3267 rmem->dma_arr = rxr->rx_agg_desc_mapping;
3269 rmem->vmem = (void **)&rxr->rx_agg_ring;
3311 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
3316 prod = rxr->rx_prod;
3318 if (bnxt_alloc_rx_data(bp, rxr, prod, GFP_KERNEL)) {
3325 rxr->rx_prod = prod;
3330 prod = rxr->rx_agg_prod;
3332 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_KERNEL)) {
3339 rxr->rx_agg_prod = prod;
3341 if (rxr->rx_tpa) {
3350 rxr->rx_tpa[i].data = data;
3351 rxr->rx_tpa[i].data_ptr = data + bp->rx_offset;
3352 rxr->rx_tpa[i].mapping = mapping;
3360 struct bnxt_rx_ring_info *rxr;
3370 rxr = &bp->rx_ring[ring_nr];
3371 ring = &rxr->rx_ring_struct;
3376 rxr->xdp_prog = bp->xdp_prog;
3380 ring = &rxr->rx_agg_ring_struct;
4117 struct bnxt_rx_ring_info *rxr;
4132 rxr = bnapi->rx_ring;
4133 if (rxr) {
4134 rxr->rx_prod = 0;
4135 rxr->rx_agg_prod = 0;
4136 rxr->rx_sw_agg_prod = 0;
4137 rxr->rx_next_cons = 0;
4259 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
4262 rxr->rx_ring_struct.ring_mem.flags =
4264 rxr->rx_agg_ring_struct.ring_mem.flags =
4267 rxr->bnapi = bp->bnapi[i];
5076 static u16 bnxt_cp_ring_for_rx(struct bnxt *bp, struct bnxt_rx_ring_info *rxr)
5079 struct bnxt_napi *bnapi = rxr->bnapi;
5085 return bnxt_cp_ring_from_grp(bp, &rxr->rx_ring_struct);
5180 struct bnxt_rx_ring_info *rxr;
5189 rxr = &bp->rx_ring[j];
5191 ring_id = rxr->rx_ring_struct.fw_ring_id;
5193 ring_id = bnxt_cp_ring_for_rx(bp, rxr);
5344 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[0];
5347 cpu_to_le16(rxr->rx_ring_struct.fw_ring_id);
5349 cpu_to_le16(bnxt_cp_ring_for_rx(bp, rxr));
5816 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5817 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
5818 struct bnxt_napi *bnapi = rxr->bnapi;
5824 bnxt_set_db(bp, &rxr->rx_db, type, map_idx, ring->fw_ring_id);
5827 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
5849 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5851 &rxr->rx_agg_ring_struct;
5859 bnxt_set_db(bp, &rxr->rx_agg_db, type, map_idx,
5861 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
5862 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
5923 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5924 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
5925 u32 grp_idx = rxr->bnapi->index;
5928 u32 cmpl_ring_id = bnxt_cp_ring_for_rx(bp, rxr);
5945 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5946 struct bnxt_ring_struct *ring = &rxr->rx_agg_ring_struct;
5947 u32 grp_idx = rxr->bnapi->index;
5950 u32 cmpl_ring_id = bnxt_cp_ring_for_rx(bp, rxr);
10677 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
10680 if (!rxr)
10684 i, rxr->rx_ring_struct.fw_ring_id, rxr->rx_prod,
10685 rxr->rx_agg_ring_struct.fw_ring_id, rxr->rx_agg_prod,
10686 rxr->rx_sw_agg_prod);
10715 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
10717 struct bnxt_napi *bnapi = rxr->bnapi;
10881 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
10885 if (!rxr->bnapi->in_reset)
10899 rxr->rx_prod = 0;
10900 rxr->rx_agg_prod = 0;
10901 rxr->rx_sw_agg_prod = 0;
10902 rxr->rx_next_cons = 0;
10903 rxr->bnapi->in_reset = false;
10905 cpr = &rxr->bnapi->cp_ring;
10908 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
10909 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
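
The pattern that recurs throughout these matches is a paged descriptor ring: rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)] picks a descriptor page and an offset within it, and the producer index advances with NEXT_RX() under a wrap mask (the aggregation ring uses the same scheme with rx_agg_desc_ring and NEXT_RX_AGG). Below is a minimal user-space sketch of that indexing scheme, assuming small illustrative sizes; the macro bodies, page capacity, and struct rx_bd contents here are placeholders chosen for the example, not the driver's actual definitions.

/*
 * Illustrative sketch only -- not the driver's code.  It models the
 * two-level ring indexing visible above: RX_RING()/RX_IDX() select a
 * descriptor page and an offset within it, and NEXT_RX() advances the
 * producer with a power-of-two wrap mask.  Sizes and macro bodies are
 * assumptions made for this example.
 */
#include <stdio.h>
#include <stdlib.h>

#define DESCS_PER_PG   16                      /* assumed descriptors per page */
#define NR_PGS         4                       /* assumed number of pages      */
#define RING_SIZE      (DESCS_PER_PG * NR_PGS) /* must be a power of two       */
#define RING_MASK      (RING_SIZE - 1)

#define RX_RING(idx)   ((idx) / DESCS_PER_PG)  /* which descriptor page        */
#define RX_IDX(idx)    ((idx) % DESCS_PER_PG)  /* offset within that page      */
#define NEXT_RX(idx)   (((idx) + 1) & RING_MASK)

struct rx_bd { unsigned long long addr; };     /* stand-in for a hardware BD   */

int main(void)
{
	struct rx_bd *ring[NR_PGS];
	unsigned int prod = 0;
	int i;

	for (i = 0; i < NR_PGS; i++) {
		ring[i] = calloc(DESCS_PER_PG, sizeof(struct rx_bd));
		if (!ring[i])
			return 1;
	}

	/*
	 * Fill the ring the way the alloc loops above do: write a
	 * descriptor at the current producer index, then advance it.
	 */
	for (i = 0; i < RING_SIZE; i++) {
		struct rx_bd *rxbd = &ring[RX_RING(prod)][RX_IDX(prod)];

		rxbd->addr = 0x1000ull * i;    /* pretend DMA address */
		prod = NEXT_RX(prod);
	}

	printf("producer wrapped back to %u\n", prod);

	for (i = 0; i < NR_PGS; i++)
		free(ring[i]);
	return 0;
}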