Lines Matching defs:pool
25 struct xsk_buff_pool *pool;
58 /* For performance reasons, each buff pool has its own array of dma_pages
87 int xp_assign_dev(struct xsk_buff_pool *pool, struct net_device *dev,
89 int xp_assign_dev_shared(struct xsk_buff_pool *pool, struct xdp_sock *umem_xs,
91 void xp_destroy(struct xsk_buff_pool *pool);
93 void xp_get_pool(struct xsk_buff_pool *pool);
94 bool xp_put_pool(struct xsk_buff_pool *pool);
95 void xp_clear_dev(struct xsk_buff_pool *pool);
96 void xp_add_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs);
97 void xp_del_xsk(struct xsk_buff_pool *pool, struct xdp_sock *xs);
103 void xp_set_rxq_info(struct xsk_buff_pool *pool, struct xdp_rxq_info *rxq);
104 int xp_dma_map(struct xsk_buff_pool *pool, struct device *dev,
106 void xp_dma_unmap(struct xsk_buff_pool *pool, unsigned long attrs);
107 struct xdp_buff *xp_alloc(struct xsk_buff_pool *pool);
108 bool xp_can_alloc(struct xsk_buff_pool *pool, u32 count);
109 void *xp_raw_get_data(struct xsk_buff_pool *pool, u64 addr);
110 dma_addr_t xp_raw_get_dma(struct xsk_buff_pool *pool, u64 addr);
127 void xp_dma_sync_for_device_slow(struct xsk_buff_pool *pool, dma_addr_t dma,
129 static inline void xp_dma_sync_for_device(struct xsk_buff_pool *pool,
132 if (!pool->dma_need_sync)
135 xp_dma_sync_for_device_slow(pool, dma, size);
145 static inline bool xp_desc_crosses_non_contig_pg(struct xsk_buff_pool *pool,
153 return pool->dma_pages_cnt &&
154 !(pool->dma_pages[addr >> PAGE_SHIFT] & XSK_NEXT_PG_CONTIG_MASK);
157 static inline u64 xp_aligned_extract_addr(struct xsk_buff_pool *pool, u64 addr)
159 return addr & pool->chunk_mask;
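
The prototypes above only show the interface; the sketch below illustrates one plausible way a zero-copy driver's TX path could chain a few of them together. It is a sketch under assumptions, not a reference implementation: the xp_*() calls and struct xdp_desc come from the listing and the AF_XDP uapi header, while my_ring, my_fetch_tx_desc() and my_post_tx_desc() are hypothetical stand-ins for driver internals.

/*
 * Sketch only: a zero-copy TX path built on the pool API listed above.
 * my_ring, my_fetch_tx_desc() and my_post_tx_desc() are hypothetical
 * driver-side helpers, not part of any kernel API.
 */
#include <linux/if_xdp.h>		/* struct xdp_desc */
#include <net/xsk_buff_pool.h>		/* xp_*() declarations */

struct my_ring {
	struct xsk_buff_pool *pool;	/* set up beforehand via xp_assign_dev*() */
};

/* Hypothetical stub: a real driver would read its AF_XDP TX ring here. */
static bool my_fetch_tx_desc(struct my_ring *ring, struct xdp_desc *desc)
{
	return false;
}

/* Hypothetical stub: a real driver would write a hardware TX descriptor here. */
static void my_post_tx_desc(struct my_ring *ring, dma_addr_t dma, u32 len)
{
}

static void my_xmit_zc(struct my_ring *ring)
{
	struct xsk_buff_pool *pool = ring->pool;
	struct xdp_desc desc;

	while (my_fetch_tx_desc(ring, &desc)) {
		/* desc.addr is UMEM-relative; the pool translates it to the
		 * DMA address established earlier by xp_dma_map(). */
		dma_addr_t dma = xp_raw_get_dma(pool, desc.addr);

		/* The inline fast path is a no-op unless pool->dma_need_sync
		 * is set; otherwise it falls back to
		 * xp_dma_sync_for_device_slow(). */
		xp_dma_sync_for_device(pool, dma, desc.len);

		my_post_tx_desc(ring, dma, desc.len);
	}
}

The RX-fill side would pair xp_can_alloc()/xp_alloc() in the same way, and the pool's lifetime is bracketed by xp_dma_map()/xp_dma_unmap() for DMA state and xp_get_pool()/xp_put_pool() for the reference count, all of which appear in the listing above.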