/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

rx.h
  25 dma_info->xsk = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_page_alloc_pool()
  41 if (!xsk_uses_need_wakeup(rq->xsk_pool)) in mlx5e_xsk_update_rx_wakeup()
  45 xsk_set_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()
  47 xsk_clear_rx_need_wakeup(rq->xsk_pool); in mlx5e_xsk_update_rx_wakeup()

tx.h
  18 if (!xsk_uses_need_wakeup(sq->xsk_pool)) in mlx5e_xsk_update_tx_wakeup()
  22 xsk_clear_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()
  24 xsk_set_tx_need_wakeup(sq->xsk_pool); in mlx5e_xsk_update_tx_wakeup()

rx.c
  50 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
  96 xsk_buff_dma_sync_for_cpu(xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_linear()

tx.c
  69 struct xsk_buff_pool *pool = sq->xsk_pool; in mlx5e_xsk_tx()
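
The rx.h/tx.h helpers above implement the AF_XDP need_wakeup handshake. A minimal sketch of the RX side of that pattern, assuming a simplified ring struct (demo_rq and the alloc_err input are hypothetical; the xsk_* calls are the real API from include/net/xdp_sock_drv.h):

#include <net/xdp_sock_drv.h>

struct demo_rq {
	struct xsk_buff_pool *xsk_pool;
};

static void demo_update_rx_wakeup(struct demo_rq *rq, bool alloc_err)
{
	if (!xsk_uses_need_wakeup(rq->xsk_pool))
		return; /* application did not opt in; nothing to signal */

	if (alloc_err)
		/* Fill ring ran dry: ask user space to poll()/sendto() us. */
		xsk_set_rx_need_wakeup(rq->xsk_pool);
	else
		xsk_clear_rx_need_wakeup(rq->xsk_pool);
}

The TX helper is the mirror image with xsk_set_tx_need_wakeup()/xsk_clear_tx_need_wakeup(), keyed on whether the driver drained the TX ring.
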
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/i40e/

i40e_xsk.c
  256 xdp = xsk_buff_alloc(rx_ring->xsk_pool); in i40e_alloc_rx_buffers_zc()
  387 xsk_buff_dma_sync_for_cpu(*bi, rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  438 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in i40e_clean_rx_irq_zc()
  440 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  442 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  465 if (!xsk_tx_peek_desc(xdp_ring->xsk_pool, &desc)) in i40e_xmit_zc()
  468 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc.addr); in i40e_xmit_zc()
  469 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, in i40e_xmit_zc()
  496 xsk_tx_release(xdp_ring->xsk_pool); in i40e_xmit_zc()
  528 struct xsk_buff_pool *bp = tx_ring->xsk_pool; in i40e_clean_xdp_tx_irq()
  [all...]
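
i40e_xmit_zc() above shows the canonical 5.10 zero-copy transmit loop. A sketch under those assumptions (demo_post_descriptor() is a hypothetical stand-in for writing the NIC TX descriptor; the xsk_* calls are the real pool API):

#include <net/xdp_sock_drv.h>

static void demo_post_descriptor(dma_addr_t dma, u32 len)
{
	/* hypothetical: fill and ring-doorbell a HW TX descriptor */
}

static u32 demo_xmit_zc(struct xsk_buff_pool *pool, u32 budget)
{
	struct xdp_desc desc;
	u32 sent = 0;

	while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
		/* Translate the UMEM offset into a device-visible address. */
		dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

		xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
		demo_post_descriptor(dma, desc.len);
		sent++;
	}
	if (sent)
		/* Publish the consumed descriptors back to the TX ring. */
		xsk_tx_release(pool);
	return sent;
}
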
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/i40e/

i40e_xsk.c
  214 if (xsk_uses_need_wakeup(rx_ring->xsk_pool) && err == -ENOBUFS) in i40e_run_xdp_zc()
  256 nb_buffs = xsk_buff_alloc_batch(rx_ring->xsk_pool, xdp, nb_buffs); in i40e_alloc_rx_buffers_zc()
  491 xsk_buff_dma_sync_for_cpu(bi, rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  526 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in i40e_clean_rx_irq_zc()
  528 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  530 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in i40e_clean_rx_irq_zc()
  544 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc->addr); in i40e_xmit_pkt()
  545 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, desc->len); in i40e_xmit_pkt()
  565 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc[i].addr); in i40e_xmit_pkt_batch()
  566 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dm in i40e_xmit_pkt_batch()
  [all...]
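
In 6.6 the per-buffer xsk_buff_alloc() of the 5.10 refill was replaced by the batched call seen at line 256. A sketch of that refill, assuming a hypothetical demo_write_rx_desc() for the NIC descriptor write (xsk_buff_alloc_batch() may legitimately return fewer buffers than requested when the fill ring is short):

#include <net/xdp_sock_drv.h>

static void demo_write_rx_desc(u32 idx, dma_addr_t dma)
{
	/* hypothetical: point HW RX descriptor idx at dma */
}

static bool demo_alloc_rx_zc(struct xsk_buff_pool *pool,
			     struct xdp_buff **bufs, u32 count)
{
	u32 nb = xsk_buff_alloc_batch(pool, bufs, count);
	u32 i;

	for (i = 0; i < nb; i++)
		demo_write_rx_desc(i, xsk_buff_xdp_get_dma(bufs[i]));

	return nb == count; /* partial refill => set need_wakeup */
}
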
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/ice/

ice_xsk.c
  498 nb_buffs_extra = ice_fill_rx_descs(rx_ring->xsk_pool, xdp, in __ice_alloc_rx_bufs_zc()
  512 nb_buffs = ice_fill_rx_descs(rx_ring->xsk_pool, xdp, rx_desc, count); in __ice_alloc_rx_bufs_zc()
  668 xsk_tx_completed(xdp_ring->xsk_pool, xsk_frames); in ice_clean_xdp_irq_zc()
  720 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, size); in ice_xmit_xdp_tx_zc()
  778 if (xsk_uses_need_wakeup(rx_ring->xsk_pool) && err == -ENOBUFS) in ice_run_xdp_zc()
  848 struct xsk_buff_pool *xsk_pool = rx_ring->xsk_pool; in ice_clean_rx_irq_zc() (local)
  898 xsk_buff_dma_sync_for_cpu(xdp, xsk_pool); in ice_clean_rx_irq_zc()
  969 if (xsk_uses_need_wakeup(xsk_pool)) { in ice_clean_rx_irq_zc()
  974 xsk_set_rx_need_wakeup(xsk_pool); in ice_clean_rx_irq_zc()
  [all...]

ice_base.c
  546 ring->xsk_pool = ice_xsk_pool(ring); in ice_vsi_cfg_rxq()
  547 if (ring->xsk_pool) { in ice_vsi_cfg_rxq()
  551 xsk_pool_get_rx_frame_size(ring->xsk_pool); in ice_vsi_cfg_rxq()
  563 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in ice_vsi_cfg_rxq()
  594 if (ring->xsk_pool) { in ice_vsi_cfg_rxq()
  597 if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs)) { in ice_vsi_cfg_rxq()

ice_txrx.h
  335 struct xsk_buff_pool *xsk_pool; (member)
  361 struct xsk_buff_pool *xsk_pool; (member)
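
The ice_base.c hits show the zero-copy RX queue bring-up in ice_vsi_cfg_rxq(): look up the pool, size RX buffers from it, register it with the queue's xdp_rxq_info, and sanity-check that the fill ring can actually cover the ring. A sketch of that sequence (demo_ring and the error code choice are illustrative):

#include <net/xdp.h>
#include <net/xdp_sock_drv.h>

struct demo_ring {
	struct xsk_buff_pool *xsk_pool;
	struct xdp_rxq_info xdp_rxq;
	u16 count;        /* descriptors in the HW ring */
	u32 rx_buf_len;
};

static int demo_cfg_rxq_zc(struct demo_ring *ring)
{
	if (!ring->xsk_pool)
		return 0; /* regular page-based RX path */

	ring->rx_buf_len = xsk_pool_get_rx_frame_size(ring->xsk_pool);
	xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq);

	/* Refuse to come up if user space sized the fill ring too small. */
	if (!xsk_buff_can_alloc(ring->xsk_pool, ring->count))
		return -ENOMEM;
	return 0;
}
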
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ice/

ice_xsk.c
  241 xdp_ring->xsk_pool = ice_xsk_pool(xdp_ring); in ice_qp_ena()
  444 rx_buf->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ice_alloc_rx_bufs_zc()
  625 xsk_buff_dma_sync_for_cpu(rx_buf->xdp, rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
  675 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ice_clean_rx_irq_zc()
  677 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
  679 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ice_clean_rx_irq_zc()
  712 if (!xsk_tx_peek_desc(xdp_ring->xsk_pool, &desc)) in ice_xmit_zc()
  715 dma = xsk_buff_raw_get_dma(xdp_ring->xsk_pool, desc.addr); in ice_xmit_zc()
  716 xsk_buff_raw_dma_sync_for_device(xdp_ring->xsk_pool, dma, in ice_xmit_zc()
  733 xsk_tx_release(xdp_ring->xsk_pool); in ice_xmit_zc()
  [all...]

ice_base.c
  311 ring->xsk_pool = ice_xsk_pool(ring); in ice_setup_rx_ctx()
  312 if (ring->xsk_pool) { in ice_setup_rx_ctx()
  316 xsk_pool_get_rx_frame_size(ring->xsk_pool); in ice_setup_rx_ctx()
  327 xsk_pool_set_rxq_info(ring->xsk_pool, &ring->xdp_rxq); in ice_setup_rx_ctx()
  421 if (ring->xsk_pool) { in ice_setup_rx_ctx()
  422 if (!xsk_buff_can_alloc(ring->xsk_pool, num_bufs)) { in ice_setup_rx_ctx()
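
ice_alloc_rx_bufs_zc() and ice_clean_rx_irq_zc() above show the 5.10-era one-buffer-at-a-time RX flow: allocate on refill, sync for the CPU on completion before XDP runs. A sketch of both halves (demo_rx_buf is illustrative; the xdp_buff fields and xsk_* calls are real):

#include <net/xdp.h>
#include <net/xdp_sock_drv.h>

struct demo_rx_buf {
	struct xdp_buff *xdp;
};

static bool demo_refill_one(struct xsk_buff_pool *pool, struct demo_rx_buf *buf)
{
	buf->xdp = xsk_buff_alloc(pool);
	return buf->xdp != NULL; /* NULL: fill ring empty, set need_wakeup */
}

static void demo_on_rx_complete(struct xsk_buff_pool *pool,
				struct demo_rx_buf *buf, u32 len)
{
	buf->xdp->data_end = buf->xdp->data + len;
	/* Make the DMA-written payload visible to the CPU before XDP runs. */
	xsk_buff_dma_sync_for_cpu(buf->xdp, pool);
}
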
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/

rx.c
  30 if (unlikely(!xsk_buff_can_alloc(rq->xsk_pool, rq->mpwqe.pages_per_wqe))) in mlx5e_xsk_alloc_rx_mpwqe()
  35 batch = xsk_buff_alloc_batch(rq->xsk_pool, xsk_buffs, in mlx5e_xsk_alloc_rx_mpwqe()
  45 xsk_buffs[batch] = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_alloc_rx_mpwqe()
  102 rq->xsk_pool->chunk_size); in mlx5e_xsk_alloc_rx_mpwqe()
  103 __be32 frame_size = cpu_to_be32(rq->xsk_pool->chunk_size); in mlx5e_xsk_alloc_rx_mpwqe()
  173 alloc = xsk_buff_alloc_batch(rq->xsk_pool, buffs + ix, wqe_bulk); in mlx5e_xsk_alloc_rx_wqes_batched()
  175 alloc = xsk_buff_alloc_batch(rq->xsk_pool, buffs + ix, contig); in mlx5e_xsk_alloc_rx_wqes_batched()
  177 alloc += xsk_buff_alloc_batch(rq->xsk_pool, buffs, wqe_bulk - contig); in mlx5e_xsk_alloc_rx_wqes_batched()
  213 *frag->xskp = xsk_buff_alloc(rq->xsk_pool); in mlx5e_xsk_alloc_rx_wqes()
  273 xsk_buff_dma_sync_for_cpu(&mxbuf->xdp, rq->xsk_pool); in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
  [all...]
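
mlx5e_xsk_alloc_rx_mpwqe() needs a full set of buffers for one multi-packet WQE, so it checks availability first, allocates in a batch, then tops up any shortfall one buffer at a time (lines 30/35/45 above). A sketch of that "batch first, top up, unwind on failure" shape (counts and error handling are illustrative):

#include <net/xdp_sock_drv.h>

static int demo_alloc_mpwqe(struct xsk_buff_pool *pool,
			    struct xdp_buff **bufs, u32 pages_per_wqe)
{
	u32 batch, i;

	/* Cheap early-out: don't start a WQE we cannot finish. */
	if (unlikely(!xsk_buff_can_alloc(pool, pages_per_wqe)))
		return -ENOMEM;

	batch = xsk_buff_alloc_batch(pool, bufs, pages_per_wqe);

	/* can_alloc() was only a hint; fill the remainder individually. */
	for (i = batch; i < pages_per_wqe; i++) {
		bufs[i] = xsk_buff_alloc(pool);
		if (unlikely(!bufs[i]))
			goto err_free;
	}
	return 0;

err_free:
	while (i--)
		xsk_buff_free(bufs[i]);
	return -ENOMEM;
}
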
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/

en_txrx.c
  90 bool need_wakeup = xsk_uses_need_wakeup(xskrq->xsk_pool); in mlx5e_napi_xsk_post()
  98 xsk_set_tx_need_wakeup(xsksq->xsk_pool); in mlx5e_napi_xsk_post()
  102 xsk_clear_tx_need_wakeup(xsksq->xsk_pool); in mlx5e_napi_xsk_post()
  108 xsk_set_rx_need_wakeup(xskrq->xsk_pool); in mlx5e_napi_xsk_post()
  117 xsk_set_rx_need_wakeup(xskrq->xsk_pool); in mlx5e_napi_xsk_post()
  119 xsk_clear_rx_need_wakeup(xskrq->xsk_pool); in mlx5e_napi_xsk_post()
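
Unlike the per-ring helpers in the 5.10 block, 6.6's mlx5e_napi_xsk_post() makes the wakeup decision once at the end of the NAPI poll, for both directions. A condensed sketch of that shape (the boolean poll results are illustrative inputs; in mlx5 the RX and TX queues share one pool):

#include <net/xdp_sock_drv.h>

static void demo_napi_xsk_post(struct xsk_buff_pool *rx_pool,
			       struct xsk_buff_pool *tx_pool,
			       bool tx_drained, bool rx_ring_full)
{
	if (!xsk_uses_need_wakeup(rx_pool))
		return;

	if (tx_drained)
		/* No more TX descriptors; user space must kick us again. */
		xsk_set_tx_need_wakeup(tx_pool);
	else
		xsk_clear_tx_need_wakeup(tx_pool);

	if (!rx_ring_full)
		/* Fill ring came up short; ask for a wakeup to refill. */
		xsk_set_rx_need_wakeup(rx_pool);
	else
		xsk_clear_rx_need_wakeup(rx_pool);
}
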
/kernel/linux/linux-6.6/drivers/net/ethernet/freescale/dpaa2/

dpaa2-xsk.c
  49 ch->xsk_pool->umem->headroom); in dpaa2_xsk_run_xdp()
  58 xsk_buff_dma_sync_for_cpu(xdp_buff, ch->xsk_pool); in dpaa2_xsk_run_xdp()
  183 if (!ch->xsk_pool) in dpaa2_xsk_disable_pool()
  200 ch->xsk_pool = NULL; in dpaa2_xsk_disable_pool()
  265 ch->xsk_pool = pool; in dpaa2_xsk_enable_pool()
  354 addr = xsk_buff_raw_get_dma(ch->xsk_pool, xdp_desc->addr); in dpaa2_xsk_tx_build_fd()
  355 xsk_buff_raw_dma_sync_for_device(ch->xsk_pool, addr, xdp_desc->len); in dpaa2_xsk_tx_build_fd()
  393 struct xdp_desc *xdp_descs = ch->xsk_pool->tx_descs; in dpaa2_xsk_tx()
  410 batch = xsk_tx_peek_release_desc_batch(ch->xsk_pool, budget); in dpaa2_xsk_tx()
  451 xsk_tx_release(ch->xsk_pool); in dpaa2_xsk_tx()
  [all...]
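
dpaa2_xsk_tx() uses the batched variant of the TX loop: a single xsk_tx_peek_release_desc_batch() both peeks and releases up to budget descriptors, which land in the pool's pre-sized tx_descs array. A sketch of that pattern (demo_build_fd() stands in for building the hardware frame descriptor):

#include <net/xdp_sock_drv.h>

static void demo_build_fd(struct xsk_buff_pool *pool, struct xdp_desc *d)
{
	dma_addr_t addr = xsk_buff_raw_get_dma(pool, d->addr);

	xsk_buff_raw_dma_sync_for_device(pool, addr, d->len);
	/* ... fill and enqueue the HW frame descriptor here ... */
}

static u32 demo_xsk_tx_batch(struct xsk_buff_pool *pool, u32 budget)
{
	struct xdp_desc *descs = pool->tx_descs;
	u32 batch, i;

	/* Peek and release in one call; no xsk_tx_release() needed
	 * for descriptors consumed through the batch API. */
	batch = xsk_tx_peek_release_desc_batch(pool, budget);
	for (i = 0; i < batch; i++)
		demo_build_fd(pool, &descs[i]);
	return batch;
}
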
/kernel/linux/linux-6.6/drivers/net/ethernet/netronome/nfp/nfd3/

xsk.c
  21 struct xsk_buff_pool *pool = r_vec->xsk_pool; in nfp_nfd3_xsk_tx_xdp()
  187 xsk_buff_dma_sync_for_cpu(xrxbuf->xdp, r_vec->xsk_pool); in nfp_nfd3_xsk_rx()
  324 xsk_tx_completed(r_vec->xsk_pool, done_pkts - reused); in nfp_nfd3_xsk_complete()
  337 struct xsk_buff_pool *xsk_pool; in nfp_nfd3_xsk_tx() (local)
  342 xsk_pool = r_vec->xsk_pool; in nfp_nfd3_xsk_tx()
  346 if (!xsk_tx_peek_desc(xsk_pool, &desc[i])) in nfp_nfd3_xsk_tx()
  356 xsk_buff_raw_dma_sync_for_device(xsk_pool, desc[i].addr, in nfp_nfd3_xsk_tx()
  368 xsk_buff_raw_get_dma(xsk_pool, desc[i].addr)); in nfp_nfd3_xsk_tx()
  381 xsk_tx_release(xsk_pool); in nfp_nfd3_xsk_tx()
  [all...]

rings.c
  25 if (tx_ring->r_vec->xsk_pool) { in nfp_nfd3_xsk_tx_bufs_free()
  29 xsk_tx_completed(tx_ring->r_vec->xsk_pool, 1); in nfp_nfd3_xsk_tx_bufs_free()
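
nfp_nfd3_xsk_complete() above shows the completion-side bookkeeping: count frames the NIC finished, subtract any buffers the driver recycled internally, and credit the rest to the AF_XDP completion ring in one call. A sketch (the done/reused split is taken from the listing; the guard is illustrative):

#include <net/xdp_sock_drv.h>

static void demo_tx_complete(struct xsk_buff_pool *pool,
			     u32 done_pkts, u32 reused)
{
	/* Only descriptors that came from the socket's TX ring go back to
	 * user space; driver-reused buffers must not be double-counted. */
	if (done_pkts > reused)
		xsk_tx_completed(pool, done_pkts - reused);
}
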
/kernel/linux/linux-6.6/drivers/net/ethernet/intel/ixgbe/

ixgbe_xsk.c
  114 if (xsk_uses_need_wakeup(rx_ring->xsk_pool) && err == -ENOBUFS) in ixgbe_run_xdp_zc()
  168 bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ixgbe_alloc_rx_buffers_zc()
  307 xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  369 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ixgbe_clean_rx_irq_zc()
  371 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  373 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  398 struct xsk_buff_pool *pool = xdp_ring->xsk_pool; in ixgbe_xmit_zc()
  466 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_clean_xdp_tx_irq()
  538 if (!ring->xsk_pool) in ixgbe_xsk_wakeup()
  553 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_xsk_clean_tx_ring()
  [all...]
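
Line 114 here (and the matching hits in i40e_run_xdp_zc() and ice_run_xdp_zc()) is the XDP_REDIRECT failure split: -ENOBUFS means the AF_XDP RX ring is full, and with need_wakeup enabled the driver pauses instead of dropping, letting user space drain and restart it. A sketch of that decision (the result codes are illustrative stand-ins for the drivers' internal I40E_XDP_EXIT-style constants):

#include <net/xdp_sock_drv.h>

#define DEMO_XDP_EXIT 4 /* illustrative "stop polling this ring" code */
#define DEMO_XDP_FAIL 5 /* illustrative "count as error, drop" code */

static int demo_handle_redirect_err(struct xsk_buff_pool *pool, int err)
{
	/* -ENOBUFS: socket RX ring full; flow control, not a real error. */
	if (xsk_uses_need_wakeup(pool) && err == -ENOBUFS)
		return DEMO_XDP_EXIT; /* need_wakeup gets set on the way out */
	return DEMO_XDP_FAIL;
}
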
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/ixgbe/

ixgbe_xsk.c
  161 bi->xdp = xsk_buff_alloc(rx_ring->xsk_pool); in ixgbe_alloc_rx_buffers_zc()
  298 xsk_buff_dma_sync_for_cpu(bi->xdp, rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  356 if (xsk_uses_need_wakeup(rx_ring->xsk_pool)) { in ixgbe_clean_rx_irq_zc()
  358 xsk_set_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  360 xsk_clear_rx_need_wakeup(rx_ring->xsk_pool); in ixgbe_clean_rx_irq_zc()
  385 struct xsk_buff_pool *pool = xdp_ring->xsk_pool; in ixgbe_xmit_zc()
  453 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_clean_xdp_tx_irq()
  525 if (!ring->xsk_pool) in ixgbe_xsk_wakeup()
  540 struct xsk_buff_pool *pool = tx_ring->xsk_pool; in ixgbe_xsk_clean_tx_ring()
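
The ixgbe_xsk_wakeup() hits in both trees show the guard every ndo_xsk_wakeup implementation needs: reject queues with no pool bound, then defer the actual work to NAPI. A sketch under those assumptions (demo_wakeup_ring and the error code choice are illustrative; napi_if_scheduled_mark_missed() is the real helper these drivers use):

#include <linux/netdevice.h>
#include <net/xdp_sock_drv.h>

struct demo_wakeup_ring {
	struct xsk_buff_pool *xsk_pool;
	struct napi_struct *napi;
};

static int demo_xsk_wakeup(struct demo_wakeup_ring *ring, u32 flags)
{
	if (!ring->xsk_pool)
		return -ENXIO; /* no AF_XDP socket bound to this queue */

	/* Defer the RX refill / TX drain to the NAPI poller; if it is
	 * already running, just mark that another pass is needed. */
	if (!napi_if_scheduled_mark_missed(ring->napi))
		napi_schedule(ring->napi);
	return 0;
}
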
/kernel/linux/linux-6.6/drivers/net/ethernet/engleder/

tsnep_main.c
  708 dma = xsk_buff_raw_get_dma(tx->xsk_pool, xdpd->addr); in tsnep_xdp_tx_map_zc()
  709 xsk_buff_raw_dma_sync_for_device(tx->xsk_pool, dma, xdpd->len); in tsnep_xdp_tx_map_zc()
  733 struct xdp_desc *descs = tx->xsk_pool->tx_descs; in tsnep_xdp_xmit_zc()
  744 batch = xsk_tx_peek_release_desc_batch(tx->xsk_pool, desc_available); in tsnep_xdp_xmit_zc()
  833 if (tx->xsk_pool) { in tsnep_tx_poll()
  835 xsk_tx_completed(tx->xsk_pool, xsk_frames); in tsnep_tx_poll()
  836 if (xsk_uses_need_wakeup(tx->xsk_pool)) in tsnep_tx_poll()
  837 xsk_set_tx_need_wakeup(tx->xsk_pool); in tsnep_tx_poll()
  899 if (!rx->xsk_pool && entry->page) in tsnep_rx_ring_cleanup()
  902 if (rx->xsk_pool in tsnep_rx_ring_cleanup()
  [all...]

tsnep.h
  93 struct xsk_buff_pool *xsk_pool; (member)
  131 struct xsk_buff_pool *xsk_pool; (member)
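
The tsnep_tx_poll() hits (lines 833-837) combine the two completion-side calls: credit finished zero-copy frames, then re-arm the TX need_wakeup flag so user space keeps driving transmission. A short sketch of that step (the xsk_frames count is an illustrative input accumulated while reclaiming descriptors):

#include <net/xdp_sock_drv.h>

static void demo_tx_poll_done(struct xsk_buff_pool *pool, u32 xsk_frames)
{
	if (!pool)
		return; /* queue not in zero-copy mode */

	if (xsk_frames)
		xsk_tx_completed(pool, xsk_frames);
	if (xsk_uses_need_wakeup(pool))
		/* Re-arm: this driver transmits only from wakeups. */
		xsk_set_tx_need_wakeup(pool);
}
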
/kernel/linux/linux-6.6/drivers/net/ethernet/netronome/nfp/

nfp_net_xsk.c
  22 headroom = xsk_pool_get_headroom(rx_ring->r_vec->xsk_pool); in nfp_net_xsk_rx_bufs_stash()
  60 struct xsk_buff_pool *pool = r_vec->xsk_pool; in nfp_net_xsk_rx_ring_fill_freelist()
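
nfp reads the pool's configured headroom once per refill so the RX path knows where packet data will start inside each buffer. A loose sketch of a freelist fill in that spirit (demo_give_to_hw() is a hypothetical descriptor write; how headroom is applied is device-specific):

#include <net/xdp_sock_drv.h>

static void demo_give_to_hw(unsigned int idx, dma_addr_t dma,
			    unsigned int headroom)
{
	/* hypothetical: post buffer to HW; data lands after headroom */
}

static void demo_fill_freelist(struct xsk_buff_pool *pool, unsigned int count)
{
	unsigned int headroom = xsk_pool_get_headroom(pool);
	unsigned int i;

	for (i = 0; i < count; i++) {
		struct xdp_buff *xdp = xsk_buff_alloc(pool);

		if (!xdp)
			break;
		demo_give_to_hw(i, xsk_buff_xdp_get_dma(xdp), headroom);
	}
}
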
/kernel/linux/linux-6.6/drivers/net/ethernet/google/gve/

gve_tx.c
  187 if (xsk_complete > 0 && tx->xsk_pool) in gve_clean_xdp_done()
  188 xsk_tx_completed(tx->xsk_pool, xsk_complete); in gve_clean_xdp_done()
  901 if (!xsk_tx_peek_desc(tx->xsk_pool, &desc)) { in gve_xsk_tx()
  906 data = xsk_buff_raw_get_data(tx->xsk_pool, desc.addr); in gve_xsk_tx()
  914 xsk_tx_release(tx->xsk_pool); in gve_xsk_tx()
  934 if (tx->xsk_pool) { in gve_xdp_poll()
  941 if (xsk_uses_need_wakeup(tx->xsk_pool)) in gve_xdp_poll()
  942 xsk_set_tx_need_wakeup(tx->xsk_pool); in gve_xdp_poll()
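
Note that gve_xsk_tx() uses xsk_buff_raw_get_data() rather than xsk_buff_raw_get_dma(): instead of DMA-mapping the UMEM, it reads the payload through a kernel pointer and copies it into its own transmit buffers. A sketch of that copy-based variant (demo_copy_to_tx_ring() is hypothetical):

#include <linux/string.h>
#include <net/xdp_sock_drv.h>

static int demo_copy_to_tx_ring(void *data, u32 len)
{
	return 0; /* hypothetical: bounce into driver TX buffer, post to HW */
}

static int demo_xsk_tx_copy(struct xsk_buff_pool *pool, int budget)
{
	struct xdp_desc desc;
	int sent = 0;

	while (sent < budget && xsk_tx_peek_desc(pool, &desc)) {
		/* Kernel-virtual view of the UMEM frame; no DMA mapping. */
		void *data = xsk_buff_raw_get_data(pool, desc.addr);

		if (demo_copy_to_tx_ring(data, desc.len))
			break;
		sent++;
	}
	if (sent)
		xsk_tx_release(pool);
	return sent;
}
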
gve_main.c
  1212 rx->xsk_pool = xsk_get_pool_from_qid(dev, i); in gve_reg_xdp_info()
  1213 if (rx->xsk_pool) { in gve_reg_xdp_info()
  1222 xsk_pool_set_rxq_info(rx->xsk_pool, in gve_reg_xdp_info()
  1229 priv->tx[tx_qid].xsk_pool = xsk_get_pool_from_qid(dev, i); in gve_reg_xdp_info()
  1255 if (rx->xsk_pool) { in gve_unreg_xdp_info()
  1257 rx->xsk_pool = NULL; in gve_unreg_xdp_info()
  1263 priv->tx[tx_qid].xsk_pool = NULL; in gve_unreg_xdp_info()
  1547 rx->xsk_pool = pool; in gve_xsk_pool_enable()
  1550 priv->tx[tx_qid].xsk_pool = pool; in gve_xsk_pool_enable()
  1583 priv->rx[qid].xsk_pool in gve_xsk_pool_disable()
  [all...]
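
gve_reg_xdp_info() shows per-queue pool discovery: at XDP setup time each queue asks the core whether an AF_XDP socket was bound to its qid and, if so, registers the pool with its rxq info. A sketch (demo_rx_queue is illustrative; xsk_get_pool_from_qid() and xsk_pool_set_rxq_info() are the real API):

#include <linux/netdevice.h>
#include <net/xdp.h>
#include <net/xdp_sock_drv.h>

struct demo_rx_queue {
	struct xsk_buff_pool *xsk_pool;
	struct xdp_rxq_info xdp_rxq;
};

static void demo_reg_xdp_info(struct net_device *dev,
			      struct demo_rx_queue *rx, u16 qid)
{
	rx->xsk_pool = xsk_get_pool_from_qid(dev, qid);
	if (rx->xsk_pool)
		/* Zero-copy: RX frames will be delivered from the UMEM. */
		xsk_pool_set_rxq_info(rx->xsk_pool, &rx->xdp_rxq);
}

On teardown (gve_unreg_xdp_info()) the pointers are simply reset to NULL, which flips the data path back to the regular buffers.
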
/kernel/linux/linux-6.6/drivers/net/ethernet/stmicro/stmmac/

stmmac.h
  74 struct xsk_buff_pool *xsk_pool; (member)
  107 struct xsk_buff_pool *xsk_pool; (member)

stmmac_main.c
  239 if (rx_q->xsk_pool) { in stmmac_disable_all_queues()
  1646 buf->xdp = xsk_buff_alloc(rx_q->xsk_pool); in stmmac_alloc_rx_buffers_zc()
  1691 rx_q->xsk_pool = stmmac_get_xsk_pool(priv, queue); in __init_dma_rx_desc_rings()
  1693 if (rx_q->xsk_pool) { in __init_dma_rx_desc_rings()
  1700 xsk_pool_set_rxq_info(rx_q->xsk_pool, &rx_q->xdp_rxq); in __init_dma_rx_desc_rings()
  1710 if (rx_q->xsk_pool) { in __init_dma_rx_desc_rings()
  1761 if (rx_q->xsk_pool) in init_dma_rx_desc_rings()
  1767 rx_q->xsk_pool = NULL; in init_dma_rx_desc_rings()
  1807 tx_q->xsk_pool = stmmac_get_xsk_pool(priv, queue); in __init_dma_tx_desc_rings()
  1894 if (tx_q->xsk_pool in dma_free_tx_skbufs()
  [all...]
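
In stmmac's __init_dma_rx_desc_rings(), the presence of a pool on the queue is the mode switch between zero-copy XSK buffers and regular page-backed buffers. A sketch of that branch (all demo_* helpers are hypothetical stand-ins for stmmac's own routines; only the xsk_* calls are the real API):

#include <net/xdp.h>
#include <net/xdp_sock_drv.h>

struct demo_stmmac_rxq {
	struct xsk_buff_pool *xsk_pool;
	struct xdp_rxq_info xdp_rxq;
};

static struct xsk_buff_pool *demo_get_xsk_pool(struct demo_stmmac_rxq *q)
{
	return NULL; /* hypothetical per-queue lookup */
}
static int demo_alloc_rx_buffers_zc(struct demo_stmmac_rxq *q, u32 n)
{
	return 0; /* hypothetical: fill ring via xsk_buff_alloc() */
}
static int demo_alloc_rx_buffers(struct demo_stmmac_rxq *q, u32 n)
{
	return 0; /* hypothetical: ordinary page-backed fill */
}

static int demo_init_rx_ring(struct demo_stmmac_rxq *rx_q, u32 ring_size)
{
	rx_q->xsk_pool = demo_get_xsk_pool(rx_q);

	if (rx_q->xsk_pool) {
		/* Zero-copy path: buffers come from the UMEM. */
		xsk_pool_set_rxq_info(rx_q->xsk_pool, &rx_q->xdp_rxq);
		return demo_alloc_rx_buffers_zc(rx_q, ring_size);
	}
	return demo_alloc_rx_buffers(rx_q, ring_size);
}
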