/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/
  hinic_hw_wq.c
      45  ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
      49  ((wqs)->page_paddr[(wq)->page_idx] \
      53  ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
     132  * @page_idx: the page index of the page will be allocated
     136  static int wqs_allocate_page(struct hinic_wqs *wqs, int page_idx) in wqs_allocate_page() argument
     138  return queue_alloc_page(wqs->hwif, &wqs->page_vaddr[page_idx], in wqs_allocate_page()
     139  &wqs->page_paddr[page_idx], in wqs_allocate_page()
     140  &wqs->shadow_page_vaddr[page_idx], in wqs_allocate_page()
     147  * @page_idx: the page index of the page will be freed
     149  static void wqs_free_page(struct hinic_wqs *wqs, int page_idx) in wqs_free_page() argument
     232  wqs_next_block(struct hinic_wqs *wqs, int *page_idx, int *block_idx) wqs_next_block() argument
     260  wqs_return_block(struct hinic_wqs *wqs, int page_idx, int block_idx) wqs_return_block() argument
     280  int page_idx, blk_idx, pos = 0; init_wqs_blocks_arr() local
     309  int err, i, page_idx; hinic_wqs_alloc() local
     361  int page_idx; hinic_wqs_free() local
     [all...]

  hinic_hw_wq.h
      18  int page_idx; member
      25  int page_idx; member

/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/
  hinic_hw_wq.c
      45  ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
      49  ((wqs)->page_paddr[(wq)->page_idx] \
      53  ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
     133  * @page_idx: the page index of the page will be allocated
     137  static int wqs_allocate_page(struct hinic_wqs *wqs, int page_idx) in wqs_allocate_page() argument
     139  return queue_alloc_page(wqs->hwif, &wqs->page_vaddr[page_idx], in wqs_allocate_page()
     140  &wqs->page_paddr[page_idx], in wqs_allocate_page()
     141  &wqs->shadow_page_vaddr[page_idx], in wqs_allocate_page()
     148  * @page_idx: the page index of the page will be freed
     150  static void wqs_free_page(struct hinic_wqs *wqs, int page_idx) in wqs_free_page() argument
     231  wqs_next_block(struct hinic_wqs *wqs, int *page_idx, int *block_idx) wqs_next_block() argument
     259  wqs_return_block(struct hinic_wqs *wqs, int page_idx, int block_idx) wqs_return_block() argument
     279  int page_idx, blk_idx, pos = 0; init_wqs_blocks_arr() local
     308  int err, i, page_idx; hinic_wqs_alloc() local
     360  int page_idx; hinic_wqs_free() local
     [all...]

  hinic_hw_wq.h
      18  int page_idx; member
      25  int page_idx; member

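In both hinic entries above, page_idx is simply an index into per-page address arrays owned by the shared WQ set: each work queue records which WQS page backs it, and the macros at lines 45/49/53 look up that page's virtual, DMA and shadow addresses. A minimal sketch of that indexing; the struct layouts below are reduced to the fields visible in the matches, and the real macros go on to add a per-block offset that is omitted here.

/* Sketch only -- types trimmed to the fields the matches above show. */
#include <linux/types.h>

struct hinic_wqs_sketch {
	void **page_vaddr;		/* CPU address of each WQ-set page */
	dma_addr_t *page_paddr;		/* DMA address of each page (type assumed) */
	void **shadow_page_vaddr;	/* shadow area used for wrapped WQEs */
};

struct hinic_wq_sketch {
	int page_idx;			/* which WQS page backs this work queue */
	int block_idx;			/* which block inside that page */
};

static inline void *wq_page_vaddr(struct hinic_wqs_sketch *wqs,
				  struct hinic_wq_sketch *wq)
{
	return wqs->page_vaddr[wq->page_idx];
}

static inline dma_addr_t wq_page_paddr(struct hinic_wqs_sketch *wqs,
					struct hinic_wq_sketch *wq)
{
	return wqs->page_paddr[wq->page_idx];
}
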
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/
  icm.h
      75  int page_idx; member
     103  iter->page_idx = 0; in mlx4_icm_first()
     113  if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
     121  iter->page_idx = 0; in mlx4_icm_next()
     128  return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
     130  return sg_dma_address(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_addr()
     136  return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
     138  return sg_dma_len(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_size()

/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx4/
  icm.h
      75  int page_idx; member
     103  iter->page_idx = 0; in mlx4_icm_first()
     113  if (++iter->page_idx >= iter->chunk->nsg) { in mlx4_icm_next()
     121  iter->page_idx = 0; in mlx4_icm_next()
     128  return iter->chunk->buf[iter->page_idx].dma_addr; in mlx4_icm_addr()
     130  return sg_dma_address(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_addr()
     136  return iter->chunk->buf[iter->page_idx].size; in mlx4_icm_size()
     138  return sg_dma_len(&iter->chunk->sg[iter->page_idx]); in mlx4_icm_size()

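The icm.h matches describe a two-level cursor: an iterator holds a pointer to the current chunk plus a page_idx into that chunk's mapped scatter-gather entries, resetting page_idx to 0 whenever it moves to the next chunk. The mthca_memfree.h and hns_roce_hem.h entries below follow the same pattern. A hedged sketch of the iteration (structure names are simplified and the coherent-buffer branch using chunk->buf is left out):

/* Sketch of the chunk/page cursor behind mlx4_icm_first()/_next()/_addr(). */
#include <linux/scatterlist.h>

struct icm_chunk_sketch {
	int nsg;			/* number of DMA-mapped sg entries */
	struct scatterlist sg[8];	/* array size is arbitrary for the sketch */
};

struct icm_iter_sketch {
	struct icm_chunk_sketch *chunk;
	int page_idx;			/* current entry within chunk->sg */
};

static inline void icm_iter_next(struct icm_iter_sketch *iter)
{
	if (++iter->page_idx >= iter->chunk->nsg) {
		/* advance iter->chunk to the next chunk (list walk elided) */
		iter->page_idx = 0;
	}
}

static inline dma_addr_t icm_iter_addr(struct icm_iter_sketch *iter)
{
	return sg_dma_address(&iter->chunk->sg[iter->page_idx]);
}

static inline unsigned int icm_iter_size(struct icm_iter_sketch *iter)
{
	return sg_dma_len(&iter->chunk->sg[iter->page_idx]);
}
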
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
  dm.c
      24  u64 page_idx = 0; in mlx5_cmd_alloc_memic() local
      44  while (page_idx < num_memic_hw_pages) { in mlx5_cmd_alloc_memic()
      46  page_idx = bitmap_find_next_zero_area(dm->memic_alloc_pages, in mlx5_cmd_alloc_memic()
      48  page_idx, in mlx5_cmd_alloc_memic()
      51  if (page_idx < num_memic_hw_pages) in mlx5_cmd_alloc_memic()
      53  page_idx, num_pages); in mlx5_cmd_alloc_memic()
      57  if (page_idx >= num_memic_hw_pages) in mlx5_cmd_alloc_memic()
      61  hw_start_addr + (page_idx * PAGE_SIZE)); in mlx5_cmd_alloc_memic()
      67  page_idx, num_pages); in mlx5_cmd_alloc_memic()
      71  page_idx in mlx5_cmd_alloc_memic()
     177  u16 page_idx; copy_op_to_user() local
     281  u16 page_idx; handle_alloc_dm_memic() local
     508  u16 page_idx; MLX5_IB_METHOD_DM_QUERY() local
     [all...]

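Here page_idx is a cursor over a bitmap of device-memory (MEMIC) pages: mlx5_cmd_alloc_memic() searches for a free run of num_pages, reserves it, asks the device to back it, and on failure retries from the next index. The cmd.c entry from the 5.10 tree further down is the same routine. A simplified sketch of the search step, using only the bitmap helpers visible in the matches; locking, the firmware command and the failure/retry path are reduced to comments.

/* Sketch: reserve num_pages contiguous pages in a bitmap of num_hw_pages
 * device pages and return the starting page index, or -ENOMEM. */
#include <linux/bitmap.h>
#include <linux/errno.h>

static long memic_alloc_pages_sketch(unsigned long *alloc_pages,
				     unsigned long num_hw_pages,
				     unsigned long num_pages)
{
	unsigned long page_idx = 0;

	while (page_idx < num_hw_pages) {
		page_idx = bitmap_find_next_zero_area(alloc_pages,
						      num_hw_pages, page_idx,
						      num_pages, 0);
		if (page_idx >= num_hw_pages)
			break;

		bitmap_set(alloc_pages, page_idx, num_pages);

		/* The real function now issues the device command to back
		 * this range; on failure it clears the bits and retries from
		 * the next index, and on success it returns an address built
		 * from hw_start_addr + (page_idx * PAGE_SIZE). */
		return page_idx;
	}
	return -ENOMEM;
}
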
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/
  mthca_memfree.h
      77  int page_idx; member
     106  iter->page_idx = 0; in mthca_icm_first()
     116  if (++iter->page_idx >= iter->chunk->nsg) { in mthca_icm_next()
     124  iter->page_idx = 0; in mthca_icm_next()
     130  return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in mthca_icm_addr()
     135  return sg_dma_len(&iter->chunk->mem[iter->page_idx]); in mthca_icm_size()

/kernel/linux/linux-6.6/drivers/infiniband/hw/mthca/
  mthca_memfree.h
      77  int page_idx; member
     106  iter->page_idx = 0; in mthca_icm_first()
     116  if (++iter->page_idx >= iter->chunk->nsg) { in mthca_icm_next()
     124  iter->page_idx = 0; in mthca_icm_next()
     130  return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in mthca_icm_addr()
     135  return sg_dma_len(&iter->chunk->mem[iter->page_idx]); in mthca_icm_size()

/kernel/linux/linux-5.10/drivers/infiniband/hw/hns/
  hns_roce_hem.h
      90  int page_idx; member
     143  iter->page_idx = 0; in hns_roce_hem_first()
     153  if (++iter->page_idx >= iter->chunk->nsg) { in hns_roce_hem_next()
     161  iter->page_idx = 0; in hns_roce_hem_next()
     167  return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in hns_roce_hem_addr()

/kernel/linux/linux-6.6/drivers/infiniband/hw/hns/
  hns_roce_hem.h
      91  int page_idx; member
     143  iter->page_idx = 0; in hns_roce_hem_first()
     153  if (++iter->page_idx >= iter->chunk->nsg) { in hns_roce_hem_next()
     161  iter->page_idx = 0; in hns_roce_hem_next()
     167  return sg_dma_address(&iter->chunk->mem[iter->page_idx]); in hns_roce_hem_addr()

/kernel/linux/linux-5.10/drivers/infiniband/sw/siw/
  siw_mem.h
      65  unsigned int page_idx = (addr - umem->fp_addr) >> PAGE_SHIFT, in siw_get_upage() local
      66  chunk_idx = page_idx >> CHUNK_SHIFT, in siw_get_upage()
      67  page_in_chunk = page_idx & ~CHUNK_MASK; in siw_get_upage()
      69  if (likely(page_idx < umem->num_pages)) in siw_get_upage()

/kernel/linux/linux-6.6/drivers/infiniband/sw/siw/
  siw_mem.h
      60  unsigned int page_idx = (addr - umem->fp_addr) >> PAGE_SHIFT, in siw_get_upage() local
      61  chunk_idx = page_idx >> CHUNK_SHIFT, in siw_get_upage()
      62  page_in_chunk = page_idx & ~CHUNK_MASK; in siw_get_upage()
      64  if (likely(page_idx < umem->num_pages)) in siw_get_upage()

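siw_get_upage() converts a user virtual address into a page index relative to the start of the pinned region, then splits that index into a chunk index and a page-within-chunk, because the pinned struct page pointers are stored in fixed-size chunks. A small sketch of that arithmetic; the chunk size of 512 pages (CHUNK_SHIFT = 9) is an assumption for illustration.

/* Sketch of the siw_get_upage() index math over chunked page arrays. */
#include <linux/mm.h>

#define CHUNK_SHIFT	9			/* assumed: 512 pages per chunk */
#define PAGES_PER_CHUNK	(1UL << CHUNK_SHIFT)
#define CHUNK_MASK	(~(PAGES_PER_CHUNK - 1))

static struct page *get_upage_sketch(struct page **chunks[],
				     unsigned long num_pages,
				     u64 base_va, u64 addr)
{
	unsigned int page_idx = (addr - base_va) >> PAGE_SHIFT,
		     chunk_idx = page_idx >> CHUNK_SHIFT,
		     page_in_chunk = page_idx & ~CHUNK_MASK;

	if (likely(page_idx < num_pages))
		return chunks[chunk_idx][page_in_chunk];

	return NULL;
}
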
/kernel/linux/linux-6.6/sound/soc/sof/amd/
  acp-stream.c
      34  int page_idx; in acp_dsp_stream_config() local
     106  for (page_idx = 0; page_idx < stream->num_pages; page_idx++) { in acp_dsp_stream_config()
     107  addr = snd_sgbuf_get_addr(stream->dmab, page_idx * PAGE_SIZE); in acp_dsp_stream_config()

  acp-loader.c
     113  u16 page_idx; in configure_pte_for_fw_loading() local
     136  for (page_idx = 0; page_idx < num_pages; page_idx++) { in configure_pte_for_fw_loading()

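Both SOF ACP files walk page_idx over the pages of a DMA buffer and look up each page's bus address with snd_sgbuf_get_addr(): acp-stream.c for the audio stream buffer, acp-loader.c for the firmware image, in each case to fill the ACP's page table. A hedged sketch of that loop; the MMIO writes that actually store each address in ACP SRAM are replaced by an output array.

/* Sketch: collect the DMA address of every page of a stream buffer, as the
 * acp_dsp_stream_config() loop does before programming the ACP page table. */
#include <linux/mm.h>
#include <sound/memalloc.h>

static void acp_collect_page_addrs_sketch(struct snd_dma_buffer *dmab,
					  unsigned int num_pages,
					  dma_addr_t *pte_out)
{
	unsigned int page_idx;

	for (page_idx = 0; page_idx < num_pages; page_idx++) {
		/* One PAGE_SIZE step per iteration; pte_out stands in for
		 * the register writes done by the real driver. */
		pte_out[page_idx] = snd_sgbuf_get_addr(dmab,
						       page_idx * PAGE_SIZE);
	}
}
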
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
  cmd.c
      62  u64 page_idx = 0; in mlx5_cmd_alloc_memic() local
      82  while (page_idx < num_memic_hw_pages) { in mlx5_cmd_alloc_memic()
      84  page_idx = bitmap_find_next_zero_area(dm->memic_alloc_pages, in mlx5_cmd_alloc_memic()
      86  page_idx, in mlx5_cmd_alloc_memic()
      89  if (page_idx < num_memic_hw_pages) in mlx5_cmd_alloc_memic()
      91  page_idx, num_pages); in mlx5_cmd_alloc_memic()
      95  if (page_idx >= num_memic_hw_pages) in mlx5_cmd_alloc_memic()
      99  hw_start_addr + (page_idx * PAGE_SIZE)); in mlx5_cmd_alloc_memic()
     105  page_idx, num_pages); in mlx5_cmd_alloc_memic()
     109  page_idx in mlx5_cmd_alloc_memic()
     [all...]

/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
  rx.c
      30  u32 page_idx) in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
      32  struct xdp_buff *xdp = wi->umr.dma_info[page_idx].xsk; in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
      70  __set_bit(page_idx, wi->xdp_xmit_bitmap); /* non-atomic */ in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
      26  mlx5e_xsk_skb_from_cqe_mpwrq_linear(struct mlx5e_rq *rq, struct mlx5e_mpw_info *wi, u16 cqe_bcnt, u32 head_offset, u32 page_idx) mlx5e_xsk_skb_from_cqe_mpwrq_linear() argument

/kernel/linux/linux-6.6/sound/soc/amd/acp/
  acp-platform.c
     155  u16 page_idx; in config_acp_dma() local
     159  for (page_idx = 0; page_idx < num_pages; page_idx++) { in config_acp_dma()

/kernel/linux/linux-6.6/drivers/vfio/
  iova_bitmap.c
     413  unsigned int page_idx = cur_bit / BITS_PER_PAGE; in iova_bitmap_set() local
     419  if (unlikely(page_idx > last_page_idx)) in iova_bitmap_set()
     422  kaddr = kmap_local_page(mapped->pages[page_idx]); in iova_bitmap_set()

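In iova_bitmap_set() the bitmap that backs dirty tracking lives in pinned user pages, so a bit position is first converted to the index of the page that holds it (cur_bit / BITS_PER_PAGE), the page is mapped with kmap_local_page(), and the bits are set there. A rough sketch of that path, assuming BITS_PER_PAGE is the number of bitmap bits one page stores; the real function sets whole runs with bitmap_set() and handles runs that cross page boundaries.

/* Sketch: set a single bit of a bitmap that is spread across pinned pages. */
#include <linux/highmem.h>
#include <linux/bitops.h>

#define BITS_PER_PAGE	(PAGE_SIZE * BITS_PER_BYTE)

static void iova_bitmap_set_bit_sketch(struct page **pages,
				       unsigned long npages,
				       unsigned long cur_bit)
{
	unsigned int page_idx = cur_bit / BITS_PER_PAGE;
	unsigned int offset = cur_bit % BITS_PER_PAGE;
	void *kaddr;

	if (page_idx >= npages)		/* mirrors the bounds check at line 419 */
		return;

	kaddr = kmap_local_page(pages[page_idx]);
	set_bit(offset, kaddr);
	kunmap_local(kaddr);
}
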
/kernel/linux/linux-5.10/sound/soc/amd/renoir/
  acp3x-pdm-dma.c
     178  u16 page_idx; in config_acp_dma() local
     191  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in config_acp_dma()

/kernel/linux/linux-5.10/sound/soc/amd/raven/
  acp3x-pcm-dma.c
     111  u16 page_idx; in config_acp3x_dma() local
     143  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in config_acp3x_dma()

/kernel/linux/linux-6.6/sound/soc/amd/yc/
  acp6x-pdm-dma.c
     155  u16 page_idx; in acp6x_config_dma() local
     167  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in acp6x_config_dma()

/kernel/linux/linux-6.6/sound/soc/amd/vangogh/
  acp5x-pcm-dma.c
     105  u16 page_idx; in config_acp5x_dma() local
     136  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in config_acp5x_dma()

/kernel/linux/linux-6.6/sound/soc/amd/renoir/
  acp3x-pdm-dma.c
     178  u16 page_idx; in config_acp_dma() local
     191  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in config_acp_dma()

/kernel/linux/linux-6.6/sound/soc/amd/raven/
  acp3x-pcm-dma.c
     109  u16 page_idx; in config_acp3x_dma() local
     141  for (page_idx = 0; page_idx < rtd->num_pages; page_idx++) { in config_acp3x_dma()

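The ASoC ACP drivers (acp-platform.c above and the renoir, raven, yc and vangogh files here) all share the same shape: a u16 page_idx loop over the runtime's buffer pages that programs one page-table entry per page, typically as a low/high pair of register writes, advancing the DMA address by PAGE_SIZE each step. A hedged sketch of that pattern; the page-table base offset, the entry stride and the field names are placeholders, not the drivers' real register layout.

/* Sketch of the per-page ACP page-table programming loop; offsets and the
 * 8-byte entry stride are assumptions for illustration. */
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/mm.h>

#define ACP_PTE_STRIDE	8

static void acp_config_dma_sketch(void __iomem *acp_base, u32 pte_offset,
				  dma_addr_t dma_addr, u16 num_pages)
{
	u16 page_idx;

	for (page_idx = 0; page_idx < num_pages; page_idx++) {
		/* One entry per PAGE_SIZE chunk of the audio buffer. */
		writel(lower_32_bits(dma_addr),
		       acp_base + pte_offset + page_idx * ACP_PTE_STRIDE);
		writel(upper_32_bits(dma_addr),
		       acp_base + pte_offset + page_idx * ACP_PTE_STRIDE + 4);

		dma_addr += PAGE_SIZE;
	}
}
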