Lines Matching defs:rxr — every hit below declares or dereferences a struct bnxt_rx_ring_info *rxr (the per-ring RX state of the bnxt driver); the leading number is the line in the source file.

321 static void bnxt_sched_reset_rxr(struct bnxt *bp, struct bnxt_rx_ring_info *rxr)
323 if (!rxr->bnapi->in_reset) {
324 rxr->bnapi->in_reset = true;
331 rxr->rx_next_cons = 0xffff;
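
The first group (321-331) is the reset scheduler: it latches a per-NAPI in_reset flag and poisons rx_next_cons with the 0xffff sentinel, so completions arriving before the reset runs are rejected by the cons == rx_next_cons check seen later at line 1856. A minimal user-space model of that latch-and-poison pattern (the struct names are invented stand-ins for bnxt_napi and bnxt_rx_ring_info):

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* invented stand-ins for bnxt_napi and bnxt_rx_ring_info */
    struct bnapi_model { bool in_reset; };
    struct ring_model  { struct bnapi_model *bnapi; uint16_t rx_next_cons; };

    static void sched_reset_rxr(struct ring_model *rxr)
    {
        if (!rxr->bnapi->in_reset) {
            rxr->bnapi->in_reset = true;
            /* the real driver also queues the reset work here */
        }
        /* 0xffff is an impossible consumer index: completions that race
           with the pending reset fail the cons == rx_next_cons check */
        rxr->rx_next_cons = 0xffff;
    }

    int main(void)
    {
        struct bnapi_model napi = { .in_reset = false };
        struct ring_model ring = { .bnapi = &napi, .rx_next_cons = 5 };

        sched_reset_rxr(&ring);
        printf("in_reset=%d next_cons=0x%04x\n",
               napi.in_reset, (unsigned)ring.rx_next_cons);
        return 0;
    }
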
760 struct bnxt_rx_ring_info *rxr,
767 page = page_pool_dev_alloc_frag(rxr->page_pool, offset,
770 page = page_pool_dev_alloc_pages(rxr->page_pool);
804 int bnxt_alloc_rx_data(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
807 struct rx_bd *rxbd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
808 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[prod];
814 __bnxt_alloc_rx_page(bp, &mapping, rxr, &offset, gfp);
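
bnxt_alloc_rx_data (804-814) pairs the software slot and the hardware descriptor at the same producer index: allocate a page from the pool, record data and DMA mapping in rx_buf_ring, and write the address into the BD in rx_desc_ring. A simplified single-table model, with the RX_RING/RX_IDX page split and the cpu_to_le64() conversion elided:

    #include <stdint.h>
    #include <stdlib.h>

    #define RING_SIZE 256U
    #define RING_MASK (RING_SIZE - 1)

    struct rx_bd_model    { uint64_t haddr; };               /* what the NIC reads */
    struct sw_rx_bd_model { void *data; uint64_t mapping; }; /* what the driver keeps */

    struct rxr_model {
        struct rx_bd_model    desc[RING_SIZE];
        struct sw_rx_bd_model buf[RING_SIZE];
    };

    static int alloc_rx_data(struct rxr_model *rxr, uint16_t prod)
    {
        void *page = malloc(4096);          /* stands in for the page_pool alloc */
        uint64_t mapping;

        if (!page)
            return -1;
        mapping = (uintptr_t)page;          /* stands in for the DMA address */

        rxr->buf[prod & RING_MASK].data = page;
        rxr->buf[prod & RING_MASK].mapping = mapping;
        rxr->desc[prod & RING_MASK].haddr = mapping;  /* cpu_to_le64() in-kernel */
        return 0;
    }

    int main(void)
    {
        struct rxr_model rxr = { 0 };
        return alloc_rx_data(&rxr, 0);      /* sketch only: the page is leaked */
    }
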
837 void bnxt_reuse_rx_data(struct bnxt_rx_ring_info *rxr, u16 cons, void *data)
839 u16 prod = rxr->rx_prod;
843 prod_rx_buf = &rxr->rx_buf_ring[prod];
844 cons_rx_buf = &rxr->rx_buf_ring[cons];
851 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
852 cons_bd = &rxr->rx_desc_ring[RX_RING(cons)][RX_IDX(cons)];
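
bnxt_reuse_rx_data (837-852) is the error-path recycler: instead of freeing a still-mapped buffer, it copies the consumer slot's data and mapping into the current producer slot, in both the software ring and the hardware descriptor ring. Restated as a self-contained sketch with the same reduced types as above:

    #include <stdint.h>
    #include <stdio.h>

    #define RING_SIZE 256U
    #define RING_MASK (RING_SIZE - 1)

    struct rx_bd_model    { uint64_t haddr; };
    struct sw_rx_bd_model { void *data; uint64_t mapping; };

    struct rxr_model {
        uint16_t rx_prod;
        struct rx_bd_model    desc[RING_SIZE];
        struct sw_rx_bd_model buf[RING_SIZE];
    };

    /* recycle the buffer at cons into the current producer slot */
    static void reuse_rx_data(struct rxr_model *rxr, uint16_t cons)
    {
        uint16_t prod = rxr->rx_prod;
        struct sw_rx_bd_model *p = &rxr->buf[prod & RING_MASK];
        struct sw_rx_bd_model *c = &rxr->buf[cons & RING_MASK];

        *p = *c;                                        /* data + mapping move */
        rxr->desc[prod & RING_MASK].haddr = c->mapping; /* re-arm the hw BD */
    }

    int main(void)
    {
        struct rxr_model r = { .rx_prod = 7 };

        r.buf[3].mapping = 0xabcd;
        reuse_rx_data(&r, 3);
        printf("prod slot mapping=0x%llx\n",
               (unsigned long long)r.buf[7].mapping);
        return 0;
    }
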
857 static inline u16 bnxt_find_next_agg_idx(struct bnxt_rx_ring_info *rxr, u16 idx)
859 u16 next, max = rxr->rx_agg_bmap_size;
861 next = find_next_zero_bit(rxr->rx_agg_bmap, max, idx);
863 next = find_first_zero_bit(rxr->rx_agg_bmap, max);
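
bnxt_find_next_agg_idx (857-863) is a wrapping bitmap search: find a zero bit at or after idx, and if the tail of the bitmap is full, wrap to the start. A user-space equivalent with a naive scan standing in for the kernel's find_next_zero_bit()/find_first_zero_bit():

    #include <stdint.h>
    #include <stdio.h>

    #define BMAP_BITS 128U

    /* naive stand-in for the kernel's find_next_zero_bit() */
    static unsigned find_next_zero(const uint64_t *map, unsigned max, unsigned from)
    {
        for (unsigned i = from; i < max; i++)
            if (!(map[i / 64] & (1ULL << (i % 64))))
                return i;
        return max;     /* kernel convention: "not found" returns the size */
    }

    static unsigned find_next_agg_idx(const uint64_t *map, unsigned idx)
    {
        unsigned next = find_next_zero(map, BMAP_BITS, idx);

        if (next >= BMAP_BITS)                  /* tail full: wrap around */
            next = find_next_zero(map, BMAP_BITS, 0);
        return next;
    }

    int main(void)
    {
        uint64_t bmap[BMAP_BITS / 64] = { 0 };

        bmap[1] = ~0ULL;                        /* bits 64..127 all busy */
        printf("free idx from 100 -> %u\n", find_next_agg_idx(bmap, 100));
        return 0;
    }
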
868 struct bnxt_rx_ring_info *rxr,
872 &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)];
876 u16 sw_prod = rxr->rx_sw_agg_prod;
879 page = __bnxt_alloc_rx_page(bp, &mapping, rxr, &offset, gfp);
884 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap)))
885 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod);
887 __set_bit(sw_prod, rxr->rx_agg_bmap);
888 rx_agg_buf = &rxr->rx_agg_ring[sw_prod];
889 rxr->rx_sw_agg_prod = NEXT_RX_AGG(sw_prod);
912 struct bnxt_rx_ring_info *rxr,
915 struct bnxt_tpa_info *tpa_info = &rxr->rx_tpa[agg_id];
925 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
926 u16 prod = rxr->rx_agg_prod;
927 u16 sw_prod = rxr->rx_sw_agg_prod;
942 agg = bnxt_get_tpa_agg_p5(bp, rxr, idx, start + i);
946 __clear_bit(cons, rxr->rx_agg_bmap);
948 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap)))
949 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod);
951 __set_bit(sw_prod, rxr->rx_agg_bmap);
952 prod_rx_buf = &rxr->rx_agg_ring[sw_prod];
953 cons_rx_buf = &rxr->rx_agg_ring[cons];
965 prod_bd = &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)];
973 rxr->rx_agg_prod = prod;
974 rxr->rx_sw_agg_prod = sw_prod;
978 struct bnxt_rx_ring_info *rxr,
985 u16 prod = rxr->rx_prod;
989 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC);
991 bnxt_reuse_rx_data(rxr, cons, data);
999 page_pool_recycle_direct(rxr->page_pool, page);
1010 struct bnxt_rx_ring_info *rxr,
1019 u16 prod = rxr->rx_prod;
1023 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC);
1025 bnxt_reuse_rx_data(rxr, cons, data);
1035 skb = napi_alloc_skb(&rxr->bnapi->napi, payload);
1037 page_pool_recycle_direct(rxr->page_pool, page);
1057 struct bnxt_rx_ring_info *rxr, u16 cons,
1062 u16 prod = rxr->rx_prod;
1066 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC);
1068 bnxt_reuse_rx_data(rxr, cons, data);
1093 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1094 u16 prod = rxr->rx_agg_prod;
1110 agg = bnxt_get_tpa_agg_p5(bp, rxr, idx, i);
1117 cons_rx_buf = &rxr->rx_agg_ring[cons];
1121 __clear_bit(cons, rxr->rx_agg_bmap);
1134 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_ATOMIC) != 0) {
1141 rxr->rx_agg_prod = prod;
1152 rxr->rx_agg_prod = prod;
1267 static u16 bnxt_alloc_agg_idx(struct bnxt_rx_ring_info *rxr, u16 agg_id)
1269 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
1280 static void bnxt_free_agg_idx(struct bnxt_rx_ring_info *rxr, u16 idx)
1282 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
1287 static u16 bnxt_lookup_agg_idx(struct bnxt_rx_ring_info *rxr, u16 agg_id)
1289 struct bnxt_tpa_idx_map *map = rxr->rx_tpa_idx_map;
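
On P5 chips the hardware returns an opaque agg_id with each TPA event, so the driver keeps a translation layer (1267-1289): alloc picks a free software slot and records it, lookup reads it back, free releases the slot. A reduced model under the assumption that rx_tpa_idx_map boils down to a busy array plus an id-to-slot table (the field names below are hypothetical simplifications of bnxt_tpa_idx_map):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define MAX_AGG   256U          /* hypothetical table size */
    #define SLOT_MASK (MAX_AGG - 1)

    struct agg_map {
        uint8_t  busy[MAX_AGG];     /* stands in for the agg-index bitmap */
        uint16_t id_tbl[MAX_AGG];   /* hw agg_id -> sw slot */
    };

    static uint16_t alloc_agg_idx(struct agg_map *m, uint16_t agg_id)
    {
        uint16_t idx = agg_id & SLOT_MASK;      /* fast path: hash by id */

        if (m->busy[idx])                       /* collision: linear rescue */
            for (idx = 0; idx < MAX_AGG && m->busy[idx]; idx++)
                ;
        if (idx >= MAX_AGG)
            return 0xffff;                      /* table full; caller handles */
        m->busy[idx] = 1;
        m->id_tbl[agg_id & SLOT_MASK] = idx;
        return idx;
    }

    static uint16_t lookup_agg_idx(const struct agg_map *m, uint16_t agg_id)
    {
        return m->id_tbl[agg_id & SLOT_MASK];
    }

    static void free_agg_idx(struct agg_map *m, uint16_t idx)
    {
        m->busy[idx] = 0;
    }

    int main(void)
    {
        struct agg_map m;
        uint16_t idx;

        memset(&m, 0, sizeof(m));
        idx = alloc_agg_idx(&m, 0x1234);
        printf("agg_id 0x1234 -> slot %u (lookup %u)\n",
               idx, lookup_agg_idx(&m, 0x1234));
        free_agg_idx(&m, idx);
        return 0;
    }
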
1294 static void bnxt_tpa_start(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
1306 agg_id = bnxt_alloc_agg_idx(rxr, agg_id);
1311 prod = rxr->rx_prod;
1312 cons_rx_buf = &rxr->rx_buf_ring[cons];
1313 prod_rx_buf = &rxr->rx_buf_ring[prod];
1314 tpa_info = &rxr->rx_tpa[agg_id];
1316 if (unlikely(cons != rxr->rx_next_cons ||
1319 cons, rxr->rx_next_cons,
1321 bnxt_sched_reset_rxr(bp, rxr);
1334 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)];
1366 rxr->rx_prod = NEXT_RX(prod);
1368 rxr->rx_next_cons = NEXT_RX(cons);
1369 cons_rx_buf = &rxr->rx_buf_ring[cons];
1371 bnxt_reuse_rx_data(rxr, cons, cons_rx_buf->data);
1372 rxr->rx_prod = NEXT_RX(rxr->rx_prod);
1597 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1617 agg_id = bnxt_lookup_agg_idx(rxr, agg_id);
1619 tpa_info = &rxr->rx_tpa[agg_id];
1627 bnxt_free_agg_idx(rxr, agg_id);
1633 tpa_info = &rxr->rx_tpa[agg_id];
1737 static void bnxt_tpa_agg(struct bnxt *bp, struct bnxt_rx_ring_info *rxr,
1743 agg_id = bnxt_lookup_agg_idx(rxr, agg_id);
1744 tpa_info = &rxr->rx_tpa[agg_id];
1789 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
1813 bnxt_tpa_agg(bp, rxr, (struct rx_agg_cmp *)rxcmp);
1829 prod = rxr->rx_prod;
1832 bnxt_tpa_start(bp, rxr, (struct rx_tpa_start_cmp *)rxcmp,
1856 if (unlikely(cons != rxr->rx_next_cons)) {
1860 if (rxr->rx_next_cons != 0xffff)
1862 cons, rxr->rx_next_cons);
1863 bnxt_sched_reset_rxr(bp, rxr);
1868 rx_buf = &rxr->rx_buf_ring[cons];
1889 bnxt_reuse_rx_data(rxr, cons, data);
1901 bnxt_sched_reset_rxr(bp, rxr);
1911 if (bnxt_xdp_attached(bp, rxr)) {
1912 bnxt_xdp_buff_init(bp, rxr, cons, data_ptr, len, &xdp);
1927 if (bnxt_rx_xdp(bp, rxr, cons, xdp, data, &data_ptr, &len, event)) {
1935 bnxt_reuse_rx_data(rxr, cons, data);
1942 bnxt_xdp_buff_frags_free(rxr, &xdp);
1955 skb = bp->rx_skb_func(bp, rxr, cons, data, data_ptr, dma_addr,
1973 skb = bnxt_xdp_build_skb(bp, skb, agg_bufs, rxr->page_pool, &xdp, rxcmp1);
1976 bnxt_xdp_buff_frags_free(rxr, &xdp);
2050 rxr->rx_prod = NEXT_RX(prod);
2051 rxr->rx_next_cons = NEXT_RX(cons);
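
The bnxt_rx_pkt fragments (1789-2051) show the ordering contract: the consumer index carried by each completion must equal the ring's expected rx_next_cons, and a mismatch, unless the 0xffff reset sentinel is already set, schedules a ring reset rather than risking use of a stale buffer. A compressed model of that check together with the NEXT_RX-style masked advance:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define RING_MASK 255U
    #define NEXT_RX(i) (((i) + 1) & RING_MASK)   /* mirrors the driver macro */

    struct ring_model { uint16_t rx_next_cons; bool needs_reset; };

    /* returns true if the completion may be consumed */
    static bool rx_cons_ok(struct ring_model *rxr, uint16_t cons)
    {
        if (cons != rxr->rx_next_cons) {
            /* 0xffff: a reset is already pending, so stay quiet */
            if (rxr->rx_next_cons != 0xffff)
                fprintf(stderr, "bad cons %d, expected %d\n",
                        cons, rxr->rx_next_cons);
            rxr->needs_reset = true;   /* bnxt_sched_reset_rxr() in the driver */
            return false;
        }
        rxr->rx_next_cons = NEXT_RX(cons);
        return true;
    }

    int main(void)
    {
        struct ring_model r = { .rx_next_cons = 10, .needs_reset = false };
        bool a = rx_cons_ok(&r, 10);   /* in order: accepted */
        bool b = rx_cons_ok(&r, 12);   /* out of order: reset scheduled */

        printf("a=%d b=%d reset=%d\n", a, b, r.needs_reset);
        return 0;
    }
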
2328 struct bnxt_rx_ring_info *rxr;
2345 rxr = bp->bnapi[grp_idx]->rx_ring;
2346 bnxt_sched_reset_rxr(bp, rxr);
2592 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
2594 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
2597 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
2599 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
2627 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
2684 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
2687 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
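
The doorbell writes at 2592-2687 publish new producer indices to the NIC through memory-mapped registers; the essential invariant is that all descriptor stores become visible before the doorbell store. A speculative user-space sketch of that ordering only; the key/index encoding below is invented, and in the kernel the barrier comes from writel() itself:

    #include <stdatomic.h>
    #include <stdint.h>
    #include <stdio.h>

    static volatile uint32_t fake_doorbell;   /* pretend ioremapped register */

    static void db_write(volatile uint32_t *db, uint32_t key, uint32_t prod)
    {
        /* release fence: descriptor stores must not pass the doorbell store
           (in the kernel, writel() provides the required ordering) */
        atomic_thread_fence(memory_order_release);
        *db = key | (prod & 0xffffffu);       /* invented encoding */
    }

    int main(void)
    {
        db_write(&fake_doorbell, 0x10000000u, 42);
        printf("doorbell=0x%08x\n", (unsigned)fake_doorbell);
        return 0;
    }
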
2923 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
2930 if (!rxr->rx_tpa)
2934 struct bnxt_tpa_info *tpa_info = &rxr->rx_tpa[i];
2950 if (!rxr->rx_buf_ring)
2954 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[i];
2963 page_pool_recycle_direct(rxr->page_pool, data);
2973 if (!rxr->rx_agg_ring)
2977 struct bnxt_sw_rx_agg_bd *rx_agg_buf = &rxr->rx_agg_ring[i];
2984 __clear_bit(i, rxr->rx_agg_bmap);
2986 page_pool_recycle_direct(rxr->page_pool, page);
2990 map = rxr->rx_tpa_idx_map;
3120 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
3122 kfree(rxr->rx_tpa_idx_map);
3123 rxr->rx_tpa_idx_map = NULL;
3124 if (rxr->rx_tpa) {
3126 kfree(rxr->rx_tpa[j].agg_arr);
3127 rxr->rx_tpa[j].agg_arr = NULL;
3130 kfree(rxr->rx_tpa);
3131 rxr->rx_tpa = NULL;
3147 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
3150 rxr->rx_tpa = kcalloc(bp->max_tpa, sizeof(struct bnxt_tpa_info),
3152 if (!rxr->rx_tpa)
3161 rxr->rx_tpa[j].agg_arr = agg;
3163 rxr->rx_tpa_idx_map = kzalloc(sizeof(*rxr->rx_tpa_idx_map),
3165 if (!rxr->rx_tpa_idx_map)
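
The TPA bookkeeping is a two-level allocation (3147-3165): an rx_tpa array of max_tpa entries, each owning its own agg_arr, plus a single rx_tpa_idx_map; the free path (3120-3131) tears it down inner-arrays-first and NULLs every pointer, so a repeated free is harmless. A minimal model of that alloc/free pairing with arbitrary sizes:

    #include <stdlib.h>

    #define MAX_TPA  64
    #define MAX_AGGS 32

    struct tpa_info  { void *agg_arr; };
    struct rxr_model { struct tpa_info *rx_tpa; void *rx_tpa_idx_map; };

    static void free_tpa(struct rxr_model *rxr)
    {
        free(rxr->rx_tpa_idx_map);
        rxr->rx_tpa_idx_map = NULL;
        if (rxr->rx_tpa) {
            for (int j = 0; j < MAX_TPA; j++) {     /* inner arrays first */
                free(rxr->rx_tpa[j].agg_arr);
                rxr->rx_tpa[j].agg_arr = NULL;
            }
            free(rxr->rx_tpa);
            rxr->rx_tpa = NULL;
        }
    }

    static int alloc_tpa(struct rxr_model *rxr)
    {
        rxr->rx_tpa = calloc(MAX_TPA, sizeof(*rxr->rx_tpa));
        if (!rxr->rx_tpa)
            return -1;
        for (int j = 0; j < MAX_TPA; j++) {
            rxr->rx_tpa[j].agg_arr = calloc(MAX_AGGS, 16); /* arbitrary entry size */
            if (!rxr->rx_tpa[j].agg_arr)
                goto err;                           /* partial alloc: unwind */
        }
        rxr->rx_tpa_idx_map = calloc(1, 64);
        if (!rxr->rx_tpa_idx_map)
            goto err;
        return 0;
    err:
        free_tpa(rxr);                              /* NULL-safe by design */
        return -1;
    }

    int main(void)
    {
        struct rxr_model r = { 0 };
        int rc = alloc_tpa(&r);

        free_tpa(&r);
        free_tpa(&r);                               /* second free is a no-op */
        return rc;
    }
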
3180 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
3183 if (rxr->xdp_prog)
3184 bpf_prog_put(rxr->xdp_prog);
3186 if (xdp_rxq_info_is_reg(&rxr->xdp_rxq))
3187 xdp_rxq_info_unreg(&rxr->xdp_rxq);
3189 page_pool_destroy(rxr->page_pool);
3190 rxr->page_pool = NULL;
3192 kfree(rxr->rx_agg_bmap);
3193 rxr->rx_agg_bmap = NULL;
3195 ring = &rxr->rx_ring_struct;
3198 ring = &rxr->rx_agg_ring_struct;
3204 struct bnxt_rx_ring_info *rxr)
3212 pp.napi = &rxr->bnapi->napi;
3220 rxr->page_pool = page_pool_create(&pp);
3221 if (IS_ERR(rxr->page_pool)) {
3222 int err = PTR_ERR(rxr->page_pool);
3224 rxr->page_pool = NULL;
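
bnxt_alloc_rx_page_pool (3204-3224) follows the page_pool recipe visible above: fill the params (including the ring's NAPI context for lock-free recycling), call page_pool_create(), and on failure convert the pointer-encoded error with PTR_ERR() and NULL the field. The idiom worth unpacking is ERR_PTR/IS_ERR itself; a user-space model of it, with a hypothetical fake_pool_create() playing the failing allocator:

    #include <stdint.h>
    #include <stdio.h>

    /* user-space model of the kernel's ERR_PTR/IS_ERR/PTR_ERR idiom, which
       the driver applies to the page_pool_create() result */
    #define MAX_ERRNO 4095
    static void *ERR_PTR(intptr_t err)  { return (void *)err; }
    static int   IS_ERR(const void *p)  { return (uintptr_t)p >= (uintptr_t)-MAX_ERRNO; }
    static long  PTR_ERR(const void *p) { return (long)(intptr_t)p; }

    /* hypothetical allocator that fails with -ENOMEM (12) */
    static void *fake_pool_create(void) { return ERR_PTR(-12); }

    int main(void)
    {
        void *pool = fake_pool_create();

        if (IS_ERR(pool)) {
            long err = PTR_ERR(pool);

            pool = NULL;               /* the driver NULLs rxr->page_pool too */
            printf("pool create failed: %ld\n", err);
            return 1;
        }
        return 0;
    }
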
3241 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
3244 ring = &rxr->rx_ring_struct;
3246 rc = bnxt_alloc_rx_page_pool(bp, rxr);
3250 rc = xdp_rxq_info_reg(&rxr->xdp_rxq, bp->dev, i, 0);
3254 rc = xdp_rxq_info_reg_mem_model(&rxr->xdp_rxq,
3256 rxr->page_pool);
3258 xdp_rxq_info_unreg(&rxr->xdp_rxq);
3270 ring = &rxr->rx_agg_ring_struct;
3276 rxr->rx_agg_bmap_size = bp->rx_agg_ring_mask + 1;
3277 mem_size = rxr->rx_agg_bmap_size / 8;
3278 rxr->rx_agg_bmap = kzalloc(mem_size, GFP_KERNEL);
3279 if (!rxr->rx_agg_bmap)
3560 struct bnxt_rx_ring_info *rxr;
3576 rxr = bnapi->rx_ring;
3577 if (!rxr)
3580 ring = &rxr->rx_ring_struct;
3584 rmem->pg_arr = (void **)rxr->rx_desc_ring;
3585 rmem->dma_arr = rxr->rx_desc_mapping;
3587 rmem->vmem = (void **)&rxr->rx_buf_ring;
3589 ring = &rxr->rx_agg_ring_struct;
3593 rmem->pg_arr = (void **)rxr->rx_agg_desc_ring;
3594 rmem->dma_arr = rxr->rx_agg_desc_mapping;
3596 rmem->vmem = (void **)&rxr->rx_agg_ring;
3638 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
3643 prod = rxr->rx_prod;
3645 if (bnxt_alloc_rx_data(bp, rxr, prod, GFP_KERNEL)) {
3652 rxr->rx_prod = prod;
3657 prod = rxr->rx_agg_prod;
3659 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_KERNEL)) {
3666 rxr->rx_agg_prod = prod;
3668 if (rxr->rx_tpa) {
3677 rxr->rx_tpa[i].data = data;
3678 rxr->rx_tpa[i].data_ptr = data + bp->rx_offset;
3679 rxr->rx_tpa[i].mapping = mapping;
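
bnxt_alloc_one_rx_ring (3638-3679) pre-fills the data ring and the aggregation ring the same way: start from the current producer, allocate one buffer per slot, stop early on allocation failure (keeping a shorter ring rather than failing the open), and publish the final producer only after the loop. A condensed model of that fill loop:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define RING_SIZE 256U
    #define RING_MASK (RING_SIZE - 1)
    #define NEXT_RX(i) (((i) + 1) & RING_MASK)

    struct rxr_model { uint16_t rx_prod; void *buf[RING_SIZE]; };

    static int alloc_rx_data(struct rxr_model *rxr, uint16_t prod)
    {
        rxr->buf[prod] = malloc(4096);
        return rxr->buf[prod] ? 0 : -1;
    }

    static void fill_ring(struct rxr_model *rxr, unsigned ring_size)
    {
        uint16_t prod = rxr->rx_prod;

        for (unsigned i = 0; i < ring_size; i++) {
            if (alloc_rx_data(rxr, prod)) {
                /* the driver warns and keeps the partially filled ring */
                fprintf(stderr, "ring only has %u entries\n", i);
                break;
            }
            prod = NEXT_RX(prod);
        }
        rxr->rx_prod = prod;   /* publish once, after the loop */
    }

    int main(void)
    {
        struct rxr_model r = { 0 };

        /* leave one slot empty so prod == cons still means an empty
           ring -- a common ring-buffer convention */
        fill_ring(&r, RING_SIZE - 1);
        printf("prod=%u\n", r.rx_prod);
        return 0;
    }
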
3687 struct bnxt_rx_ring_info *rxr;
3697 rxr = &bp->rx_ring[ring_nr];
3698 ring = &rxr->rx_ring_struct;
3703 rxr->xdp_prog = bp->xdp_prog;
3707 ring = &rxr->rx_agg_ring_struct;
4418 struct bnxt_rx_ring_info *rxr;
4433 rxr = bnapi->rx_ring;
4434 if (rxr) {
4435 rxr->rx_prod = 0;
4436 rxr->rx_agg_prod = 0;
4437 rxr->rx_sw_agg_prod = 0;
4438 rxr->rx_next_cons = 0;
4559 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
4562 rxr->rx_ring_struct.ring_mem.flags =
4564 rxr->rx_agg_ring_struct.ring_mem.flags =
4567 rxr->bnapi = bp->bnapi[i];
5152 static u16 bnxt_cp_ring_for_rx(struct bnxt *bp, struct bnxt_rx_ring_info *rxr)
5155 struct bnxt_napi *bnapi = rxr->bnapi;
5161 return bnxt_cp_ring_from_grp(bp, &rxr->rx_ring_struct);
5256 struct bnxt_rx_ring_info *rxr;
5265 rxr = &bp->rx_ring[j];
5267 ring_id = rxr->rx_ring_struct.fw_ring_id;
5269 ring_id = bnxt_cp_ring_for_rx(bp, rxr);
5469 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[0];
5472 cpu_to_le16(rxr->rx_ring_struct.fw_ring_id);
5474 cpu_to_le16(bnxt_cp_ring_for_rx(bp, rxr));
5960 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5961 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
5962 struct bnxt_napi *bnapi = rxr->bnapi;
5968 bnxt_set_db(bp, &rxr->rx_db, type, map_idx, ring->fw_ring_id);
5971 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
5993 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
5995 &rxr->rx_agg_ring_struct;
6003 bnxt_set_db(bp, &rxr->rx_agg_db, type, map_idx,
6005 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
6006 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
6071 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
6072 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct;
6073 u32 grp_idx = rxr->bnapi->index;
6076 u32 cmpl_ring_id = bnxt_cp_ring_for_rx(bp, rxr);
6093 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
6094 struct bnxt_ring_struct *ring = &rxr->rx_agg_ring_struct;
6095 u32 grp_idx = rxr->bnapi->index;
6098 u32 cmpl_ring_id = bnxt_cp_ring_for_rx(bp, rxr);
11567 struct bnxt_rx_ring_info *rxr = bnapi->rx_ring;
11570 if (!rxr)
11574 i, rxr->rx_ring_struct.fw_ring_id, rxr->rx_prod,
11575 rxr->rx_agg_ring_struct.fw_ring_id, rxr->rx_agg_prod,
11576 rxr->rx_sw_agg_prod);
11605 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[ring_nr];
11607 struct bnxt_napi *bnapi = rxr->bnapi;
11773 struct bnxt_rx_ring_info *rxr = &bp->rx_ring[i];
11777 if (!rxr->bnapi->in_reset)
11791 rxr->rx_prod = 0;
11792 rxr->rx_agg_prod = 0;
11793 rxr->rx_sw_agg_prod = 0;
11794 rxr->rx_next_cons = 0;
11795 rxr->bnapi->in_reset = false;
11797 cpr = &rxr->bnapi->cp_ring;
11800 bnxt_db_write(bp, &rxr->rx_agg_db, rxr->rx_agg_prod);
11801 bnxt_db_write(bp, &rxr->rx_db, rxr->rx_prod);
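
The closing fragments (11773-11801) are the per-ring recovery path that ties the section together, and the ordering is the point: only rings whose NAPI carries in_reset are touched, posted buffers are freed, all four producer/consumer indices rewind to zero, in_reset clears so completions are trusted again, the ring is refilled, and the doorbells are rung last so the NIC never sees a half-initialized ring. A compressed model with stub helpers standing in for the real free/refill/doorbell code:

    #include <stdbool.h>
    #include <stdint.h>

    struct ring_model {
        bool in_reset;
        uint16_t rx_prod, rx_agg_prod, rx_sw_agg_prod, rx_next_cons;
    };

    /* stubs standing in for the real free/refill/doorbell helpers */
    static void free_ring_bufs(struct ring_model *r) { (void)r; }
    static void refill_ring(struct ring_model *r)
    {
        r->rx_prod = 200;        /* pretend we posted 200 data buffers */
        r->rx_agg_prod = 100;    /* ...and 100 aggregation buffers */
    }
    static void ring_doorbells(struct ring_model *r) { (void)r; }

    static void rx_ring_reset(struct ring_model *rxr)
    {
        if (!rxr->in_reset)      /* skip healthy rings */
            return;

        free_ring_bufs(rxr);     /* drop every posted buffer */

        rxr->rx_prod = 0;        /* rewind all ring state */
        rxr->rx_agg_prod = 0;
        rxr->rx_sw_agg_prod = 0;
        rxr->rx_next_cons = 0;
        rxr->in_reset = false;   /* completions are trusted again */

        refill_ring(rxr);        /* repost fresh buffers... */
        ring_doorbells(rxr);     /* ...then tell the NIC, last */
    }

    int main(void)
    {
        struct ring_model r = { .in_reset = true, .rx_next_cons = 0xffff };

        rx_ring_reset(&r);
        return r.in_reset;       /* 0 on success */
    }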