Lines matching refs: pool — include/net/xdp_sock_drv.h (driver-facing AF_XDP buffer pool API)

/* TX and need_wakeup API, available when CONFIG_XDP_SOCKETS is set: */
void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries);
bool xsk_tx_peek_desc(struct xsk_buff_pool *pool, struct xdp_desc *desc);
u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max);
void xsk_tx_release(struct xsk_buff_pool *pool);

void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool);
void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool);
bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool);
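
A minimal sketch of how a zero-copy driver typically drives this TX API from its
NAPI context. Everything prefixed my_ (my_ring, my_ring_has_room(),
my_ring_post(), my_ring_reap_completions()) is a hypothetical placeholder for
the driver's own hardware-ring code:

#include <net/xdp_sock_drv.h>

static void my_drv_xsk_xmit(struct my_ring *ring, struct xsk_buff_pool *pool)
{
        struct xdp_desc desc;
        u32 sent = 0, done;

        /* Pull descriptors from the socket's TX ring and post them to HW. */
        while (my_ring_has_room(ring) && xsk_tx_peek_desc(pool, &desc)) {
                dma_addr_t dma = xsk_buff_raw_get_dma(pool, desc.addr);

                xsk_buff_raw_dma_sync_for_device(pool, dma, desc.len);
                my_ring_post(ring, dma, desc.len);
                sent++;
        }
        if (sent)
                xsk_tx_release(pool);   /* commit the peeked descriptors */

        /* Once HW signals completion, hand the entries back to user space. */
        done = my_ring_reap_completions(ring);
        if (done)
                xsk_tx_completed(pool, done);
}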
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
{
        return XDP_PACKET_HEADROOM + pool->headroom;
}

static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
{
        return pool->chunk_size;
}

static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
{
        return xsk_pool_get_chunk_size(pool) - xsk_pool_get_headroom(pool);
}
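
A worked example of the frame-size arithmetic, assuming the common
configuration of 2048-byte chunks, the kernel's XDP_PACKET_HEADROOM of 256
bytes, and no extra per-pool headroom (chunk size and pool headroom are
illustrative):

        rx_frame_size = chunk_size - (XDP_PACKET_HEADROOM + pool->headroom)
                      = 2048 - (256 + 0)
                      = 1792 bytes of RX packet data per buffer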
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
                                         struct xdp_rxq_info *rxq)
{
        xp_set_rxq_info(pool, rxq);
}

static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
{
#ifdef CONFIG_NET_RX_BUSY_POLL
        return pool->heads[0].xdp.rxq->napi_id;
#else
        return 0;
#endif
}

static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
                                      unsigned long attrs)
{
        xp_dma_unmap(pool, attrs);
}

static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
                                   struct device *dev, unsigned long attrs)
{
        struct xdp_umem *umem = pool->umem;

        return xp_dma_map(pool, dev, attrs, umem->pgs, umem->npgs);
}
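
A sketch of the setup-time sequence a driver runs when a pool is attached to
an RX queue; struct my_rxq and its fields are hypothetical, and error
unwinding is elided:

static int my_drv_xsk_enable(struct my_rxq *rxq, struct xsk_buff_pool *pool)
{
        int err;

        /* Map the UMEM pages for this device once, at attach time. */
        err = xsk_pool_dma_map(pool, rxq->dma_dev, 0);
        if (err)
                return err;

        /* Buffers allocated from the pool then carry this queue's rxq_info. */
        xsk_pool_set_rxq_info(pool, &rxq->xdp_rxq);
        return 0;
}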
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
{
        return xp_alloc(pool);
}

/* Returns as many entries as possible up to max. 0 <= N <= max. */
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
{
        return xp_alloc_batch(pool, xdp, max);
}

static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
{
        return xp_can_alloc(pool, count);
}
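
A sketch of an RX-ring refill loop built on the batch allocator. MY_RX_BATCH
and the my_ helpers are hypothetical; xsk_buff_xdp_get_dma() is another helper
provided by this header:

#define MY_RX_BATCH 64

static void my_drv_refill_rx(struct my_rxq *rxq, struct xsk_buff_pool *pool)
{
        struct xdp_buff *bufs[MY_RX_BATCH];
        u32 i, n;

        n = xsk_buff_alloc_batch(pool, bufs, MY_RX_BATCH);
        for (i = 0; i < n; i++) {
                /* The pool pre-mapped every chunk, so the DMA address is a lookup. */
                my_ring_post_rx(rxq, xsk_buff_xdp_get_dma(bufs[i]));
        }
}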
/* Multi-buffer (frag) support chains buffers on the pool's xskb_list. The
 * matching lines below sit inside xsk_buff_free(), xsk_buff_add_frag(),
 * xsk_buff_get_frag() and xsk_buff_get_tail() respectively:
 */
        struct list_head *xskb_list = &xskb->pool->xskb_list;

        list_add_tail(&frag->xskb_list_node, &frag->pool->xskb_list);

        frag = list_first_entry_or_null(&xskb->pool->xskb_list,
                                        struct xdp_buff_xsk, xskb_list_node);

        frag = list_last_entry(&xskb->pool->xskb_list, struct xdp_buff_xsk,
                               xskb_list_node);
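
A simplified sketch of multi-buffer RX assembly on top of these helpers,
assuming one completed pool buffer per hardware descriptor;
my_next_completed_buff() and my_desc_is_last() are hypothetical, and real
drivers also maintain the frame's skb_shared_info frag count:

static struct xdp_buff *my_drv_collect_frags(struct my_rxq *rxq)
{
        struct xdp_buff *first = NULL, *xdp;

        while ((xdp = my_next_completed_buff(rxq))) {
                if (!first)
                        first = xdp;            /* head fragment */
                else
                        xsk_buff_add_frag(xdp); /* chain on pool->xskb_list */
                if (my_desc_is_last(rxq))
                        break;
        }
        /* xsk_buff_free(first) would release the head and every chained frag. */
        return first;
}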
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
                                              u64 addr)
{
        return xp_raw_get_dma(pool, addr);
}

static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
{
        return xp_raw_get_data(pool, addr);
}

static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
{
        struct xdp_buff_xsk *xskb = container_of(xdp, struct xdp_buff_xsk, xdp);

        if (!pool->dma_need_sync)       /* coherent mapping: nothing to sync */
                return;

        xp_dma_sync_for_cpu(xskb);
}

static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
                                                    dma_addr_t dma, size_t size)
{
        xp_dma_sync_for_device(pool, dma, size);
}
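
On RX the driver syncs each completed buffer for the CPU before running its
XDP program on it; a sketch, with my_run_xdp() standing in for the driver's
verdict handling and xsk_buff_set_size() being another helper from this
header:

static void my_drv_handle_rx(struct my_rxq *rxq, struct xdp_buff *xdp, u32 len)
{
        xsk_buff_set_size(xdp, len);
        xsk_buff_dma_sync_for_cpu(xdp, rxq->pool);  /* no-op on coherent mappings */
        my_run_xdp(rxq, xdp);                       /* XDP_PASS / XDP_REDIRECT / ... */
}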
/* !CONFIG_XDP_SOCKETS: the same API is provided as empty stubs (void
 * functions do nothing; the others return 0, false or NULL), so driver code
 * can call it without #ifdefs. Matching stub signatures:
 */
static inline void xsk_tx_completed(struct xsk_buff_pool *pool, u32 nb_entries)
static inline bool xsk_tx_peek_desc(struct xsk_buff_pool *pool,
                                    struct xdp_desc *desc)
static inline u32 xsk_tx_peek_release_desc_batch(struct xsk_buff_pool *pool, u32 max)
static inline void xsk_tx_release(struct xsk_buff_pool *pool)
static inline void xsk_set_rx_need_wakeup(struct xsk_buff_pool *pool)
static inline void xsk_set_tx_need_wakeup(struct xsk_buff_pool *pool)
static inline void xsk_clear_rx_need_wakeup(struct xsk_buff_pool *pool)
static inline void xsk_clear_tx_need_wakeup(struct xsk_buff_pool *pool)
static inline bool xsk_uses_need_wakeup(struct xsk_buff_pool *pool)
static inline u32 xsk_pool_get_headroom(struct xsk_buff_pool *pool)
static inline u32 xsk_pool_get_chunk_size(struct xsk_buff_pool *pool)
static inline u32 xsk_pool_get_rx_frame_size(struct xsk_buff_pool *pool)
static inline void xsk_pool_set_rxq_info(struct xsk_buff_pool *pool,
                                         struct xdp_rxq_info *rxq)
static inline unsigned int xsk_pool_get_napi_id(struct xsk_buff_pool *pool)
static inline void xsk_pool_dma_unmap(struct xsk_buff_pool *pool,
                                      unsigned long attrs)
static inline int xsk_pool_dma_map(struct xsk_buff_pool *pool,
                                   struct device *dev, unsigned long attrs)
static inline struct xdp_buff *xsk_buff_alloc(struct xsk_buff_pool *pool)
static inline u32 xsk_buff_alloc_batch(struct xsk_buff_pool *pool, struct xdp_buff **xdp, u32 max)
static inline bool xsk_buff_can_alloc(struct xsk_buff_pool *pool, u32 count)
static inline dma_addr_t xsk_buff_raw_get_dma(struct xsk_buff_pool *pool,
                                              u64 addr)
static inline void *xsk_buff_raw_get_data(struct xsk_buff_pool *pool, u64 addr)
static inline void xsk_buff_dma_sync_for_cpu(struct xdp_buff *xdp, struct xsk_buff_pool *pool)
static inline void xsk_buff_raw_dma_sync_for_device(struct xsk_buff_pool *pool,
                                                    dma_addr_t dma, size_t size)
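
The practical effect of the stubs, sketched below: this fragment compiles in
both configurations, and with CONFIG_XDP_SOCKETS disabled
xsk_uses_need_wakeup() is constant-false, so the compiler drops the whole
branch. my_drv_poll_done() is a hypothetical caller:

static void my_drv_poll_done(struct xsk_buff_pool *pool, bool more_work)
{
        /* No #ifdef CONFIG_XDP_SOCKETS needed around these calls. */
        if (xsk_uses_need_wakeup(pool)) {
                if (more_work)
                        xsk_clear_rx_need_wakeup(pool);
                else
                        xsk_set_rx_need_wakeup(pool);
        }
}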