/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/
hinic_hw_wq.c
     31  #define WQS_FREE_BLOCKS_SIZE(wqs) (WQS_MAX_NUM_BLOCKS * \
     32          sizeof((wqs)->free_blocks[0]))
     44  #define WQ_BASE_VADDR(wqs, wq) \
     45          ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
     48  #define WQ_BASE_PADDR(wqs, wq) \
     49          ((wqs)->page_paddr[(wq)->page_idx] \
     52  #define WQ_BASE_ADDR(wqs, wq) \
     53          ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
    111  dev_err(&pdev->dev, "Failed to allocate dma for wqs page\n");  in queue_alloc_page()
    131  * @wqs
    136  wqs_allocate_page(struct hinic_wqs *wqs, int page_idx)  argument
    149  wqs_free_page(struct hinic_wqs *wqs, int page_idx)  argument
    191  alloc_page_arrays(struct hinic_wqs *wqs)  argument
    222  free_page_arrays(struct hinic_wqs *wqs)  argument
    232  wqs_next_block(struct hinic_wqs *wqs, int *page_idx, int *block_idx)  argument
    260  wqs_return_block(struct hinic_wqs *wqs, int page_idx, int block_idx)  argument
    278  init_wqs_blocks_arr(struct hinic_wqs *wqs)  argument
    305  hinic_wqs_alloc(struct hinic_wqs *wqs, int max_wqs, struct hinic_hwif *hwif)  argument
    357  hinic_wqs_free(struct hinic_wqs *wqs)  argument
    504  hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  argument
    580  hinic_wq_free(struct hinic_wqs *wqs, struct hinic_wq *wq)  argument
    [all...]
hinic_hw_wq.h
     85  int hinic_wqs_alloc(struct hinic_wqs *wqs, int num_wqs,
     88  void hinic_wqs_free(struct hinic_wqs *wqs);
     90  int hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq,
     94  void hinic_wq_free(struct hinic_wqs *wqs, struct hinic_wq *wq);
hinic_hw_io.c
    283  err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->sq_wq[q_id],  in init_qp()
    291  err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->rq_wq[q_id],  in init_qp()
    335  hinic_wq_free(&func_to_io->wqs, &func_to_io->rq_wq[q_id]);  in init_qp()
    338  hinic_wq_free(&func_to_io->wqs, &func_to_io->sq_wq[q_id]);  in init_qp()
    357  hinic_wq_free(&func_to_io->wqs, &func_to_io->rq_wq[q_id]);  in destroy_qp()
    358  hinic_wq_free(&func_to_io->wqs, &func_to_io->sq_wq[q_id]);  in destroy_qp()
    551  err = hinic_wqs_alloc(&func_to_io->wqs, 2 * max_qps, hwif);  in hinic_io_init()
    607  hinic_wqs_free(&func_to_io->wqs);  in hinic_io_init()
    633  hinic_wqs_free(&func_to_io->wqs);  in hinic_io_free()
hinic_hw_io.h
     65  struct hinic_wqs wqs;  member
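Taken together, the hits above trace the hinic work-queue API end to end: hinic_wqs_alloc() reserves a pool of WQ blocks against the HW interface, hinic_wq_allocate() carves one queue out of that pool, and the free routines unwind in reverse order; the 6.6 tree below shows the same API at slightly shifted line numbers. Below is a minimal sketch of a caller in the style of init_qp()/hinic_io_init(); the function name and the numeric wqebb-size/page-size/depth/max-WQE-size arguments are illustrative placeholders, not the driver's real constants.

/* Hypothetical caller of the hinic WQ API listed above. */
static int example_alloc_qp_wqs(struct hinic_wqs *wqs,
                                struct hinic_wq *sq_wq,
                                struct hinic_wq *rq_wq,
                                struct hinic_hwif *hwif,
                                int max_qps)
{
        int err;

        /* One send WQ and one receive WQ per queue pair. */
        err = hinic_wqs_alloc(wqs, 2 * max_qps, hwif);
        if (err)
                return err;

        /* wqebb_size, wq_page_size, q_depth, max_wqe_size: placeholders */
        err = hinic_wq_allocate(wqs, sq_wq, 64, 4096, 1024, 128);
        if (err)
                goto err_sq_wq;

        err = hinic_wq_allocate(wqs, rq_wq, 32, 4096, 1024, 32);
        if (err)
                goto err_rq_wq;

        return 0;

err_rq_wq:
        hinic_wq_free(wqs, sq_wq);
err_sq_wq:
        hinic_wqs_free(wqs);
        return err;
}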
/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/
hinic_hw_wq.c
     31  #define WQS_FREE_BLOCKS_SIZE(wqs) (WQS_MAX_NUM_BLOCKS * \
     32          sizeof((wqs)->free_blocks[0]))
     44  #define WQ_BASE_VADDR(wqs, wq) \
     45          ((void *)((wqs)->page_vaddr[(wq)->page_idx]) \
     48  #define WQ_BASE_PADDR(wqs, wq) \
     49          ((wqs)->page_paddr[(wq)->page_idx] \
     52  #define WQ_BASE_ADDR(wqs, wq) \
     53          ((void *)((wqs)->shadow_page_vaddr[(wq)->page_idx]) \
    112  dev_err(&pdev->dev, "Failed to allocate dma for wqs page\n");  in queue_alloc_page()
    132  * @wqs
    137  wqs_allocate_page(struct hinic_wqs *wqs, int page_idx)  argument
    150  wqs_free_page(struct hinic_wqs *wqs, int page_idx)  argument
    190  alloc_page_arrays(struct hinic_wqs *wqs)  argument
    221  free_page_arrays(struct hinic_wqs *wqs)  argument
    231  wqs_next_block(struct hinic_wqs *wqs, int *page_idx, int *block_idx)  argument
    259  wqs_return_block(struct hinic_wqs *wqs, int page_idx, int block_idx)  argument
    277  init_wqs_blocks_arr(struct hinic_wqs *wqs)  argument
    304  hinic_wqs_alloc(struct hinic_wqs *wqs, int max_wqs, struct hinic_hwif *hwif)  argument
    356  hinic_wqs_free(struct hinic_wqs *wqs)  argument
    503  hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq, u16 wqebb_size, u32 wq_page_size, u16 q_depth, u16 max_wqe_size)  argument
    579  hinic_wq_free(struct hinic_wqs *wqs, struct hinic_wq *wq)  argument
    [all...]
hinic_hw_wq.h
     85  int hinic_wqs_alloc(struct hinic_wqs *wqs, int num_wqs,
     88  void hinic_wqs_free(struct hinic_wqs *wqs);
     90  int hinic_wq_allocate(struct hinic_wqs *wqs, struct hinic_wq *wq,
     94  void hinic_wq_free(struct hinic_wqs *wqs, struct hinic_wq *wq);
hinic_hw_io.c
    283  err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->sq_wq[q_id],  in init_qp()
    291  err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->rq_wq[q_id],  in init_qp()
    335  hinic_wq_free(&func_to_io->wqs, &func_to_io->rq_wq[q_id]);  in init_qp()
    338  hinic_wq_free(&func_to_io->wqs, &func_to_io->sq_wq[q_id]);  in init_qp()
    357  hinic_wq_free(&func_to_io->wqs, &func_to_io->rq_wq[q_id]);  in destroy_qp()
    358  hinic_wq_free(&func_to_io->wqs, &func_to_io->sq_wq[q_id]);  in destroy_qp()
    550  err = hinic_wqs_alloc(&func_to_io->wqs, 2 * max_qps, hwif);  in hinic_io_init()
    606  hinic_wqs_free(&func_to_io->wqs);  in hinic_io_init()
    632  hinic_wqs_free(&func_to_io->wqs);  in hinic_io_free()
hinic_hw_io.h
     65  struct hinic_wqs wqs;  member
/kernel/linux/linux-5.10/drivers/dma/idxd/
irq.c
     30  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_device_reinit()
     75  struct idxd_wq *wq = &idxd->wqs[id];  in process_misc_interrupts()
     83  struct idxd_wq *wq = &idxd->wqs[i];  in process_misc_interrupts()
device.c
    450  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_device_wqs_clear_state()
    511  iowrite64(group->grpcfg.wqs[i],  in idxd_group_config_write()
    614  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_wqs_config_write()
    688  group->grpcfg.wqs[j] = 0;  in idxd_wqs_setup()
    692  wq = &idxd->wqs[i];  in idxd_wqs_setup()
    705  group->grpcfg.wqs[wq->id / 64] |= BIT(wq->id % 64);  in idxd_wqs_setup()
init.c
    162  idxd->wqs = devm_kcalloc(dev, idxd->max_wqs, sizeof(struct idxd_wq),  in idxd_setup_internals()
    164  if (!idxd->wqs)  in idxd_setup_internals()
    173  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_setup_internals()
sysfs.c
    291  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_config_bus_remove()
    303  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_config_bus_remove()
    642  struct idxd_wq *wq = &idxd->wqs[i];  in group_work_queues_show()
    897  struct idxd_wq *wq = &idxd->wqs[i];  in total_claimed_wq_size()
   1313  struct idxd_wq *wq = &idxd->wqs[i];  in clients_show()
   1534  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_setup_wq_sysfs()
   1554  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_setup_wq_sysfs()
   1624  struct idxd_wq *wq = &idxd->wqs[i];  in idxd_cleanup_sysfs()
registers.h
    286  u64 wqs[4];  member
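The "u64 wqs[4]" member above is the group configuration's work-queue bitmap: four 64-bit words cover up to 256 WQ ids, and device.c sets a queue's bit with "wqs[wq->id / 64] |= BIT(wq->id % 64)". A minimal sketch of that indexing follows; the helper names are hypothetical, not part of the driver.

#include <linux/bits.h>

/* 4 x u64 = 256 possible WQ ids per group, matching "u64 wqs[4]" above. */
static void example_grpcfg_wq_set(u64 *wqs, unsigned int wq_id)
{
        wqs[wq_id / 64] |= BIT(wq_id % 64);
}

static bool example_grpcfg_wq_test(const u64 *wqs, unsigned int wq_id)
{
        return wqs[wq_id / 64] & BIT(wq_id % 64);
}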
idxd.h
    181  struct idxd_wq *wqs;  member
/kernel/linux/linux-6.6/drivers/dma/idxd/
init.c
    153  idxd->wqs = kcalloc_node(idxd->max_wqs, sizeof(struct idxd_wq *),  in idxd_setup_wqs()
    155  if (!idxd->wqs)  in idxd_setup_wqs()
    160  kfree(idxd->wqs);  in idxd_setup_wqs()
    210  idxd->wqs[i] = wq;  in idxd_setup_wqs()
    217  wq = idxd->wqs[i];  in idxd_setup_wqs()
    338  put_device(wq_confdev(idxd->wqs[i]));  in idxd_cleanup_internals()
    419  put_device(wq_confdev(idxd->wqs[i]));  in idxd_setup_internals()
    789  wq = idxd->wqs[i];  in idxd_wqs_quiesce()
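Note the change from 5.10 above: there idxd->wqs was a devm-managed array of struct idxd_wq, while in 6.6 it is an array of pointers (sizeof(struct idxd_wq *)) with each WQ allocated separately so it can carry its own device lifetime. A condensed sketch of the 6.6 pattern follows; the function name is hypothetical, and the real code releases entries through put_device(wq_confdev(...)) rather than the plain kfree() used here to keep the sketch short.

static int example_setup_wqs(struct idxd_device *idxd, int node)
{
        struct idxd_wq *wq;
        int i;

        /* Array of pointers first, sized for the device's WQ count. */
        idxd->wqs = kcalloc_node(idxd->max_wqs, sizeof(struct idxd_wq *),
                                 GFP_KERNEL, node);
        if (!idxd->wqs)
                return -ENOMEM;

        /* Then one independently owned WQ object per slot. */
        for (i = 0; i < idxd->max_wqs; i++) {
                wq = kzalloc_node(sizeof(*wq), GFP_KERNEL, node);
                if (!wq)
                        goto err;
                idxd->wqs[i] = wq;
        }
        return 0;

err:
        while (--i >= 0)
                kfree(idxd->wqs[i]);
        kfree(idxd->wqs);
        return -ENOMEM;
}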
device.c
    295  struct idxd_wq *wq = idxd->wqs[i];  in idxd_wqs_unmap_portal()
    709  struct idxd_wq *wq = idxd->wqs[i];  in idxd_device_wqs_clear_state()
    851  iowrite64(group->grpcfg.wqs[i], idxd->reg_base + grpcfg_offset);  in idxd_group_config_write()
   1001  struct idxd_wq *wq = idxd->wqs[i];  in idxd_wqs_config_write()
   1073  group->grpcfg.wqs[j] = 0;  in idxd_wqs_setup()
   1077  wq = idxd->wqs[i];  in idxd_wqs_setup()
   1089  group->grpcfg.wqs[wq->id / 64] |= BIT(wq->id % 64);  in idxd_wqs_setup()
   1173  group->grpcfg.wqs[i] = ioread64(idxd->reg_base + grpcfg_offset);  in idxd_group_load_config()
   1175  group->id, i, grpcfg_offset, group->grpcfg.wqs[i]);  in idxd_group_load_config()
   1184  /* No need to check beyond max wqs */  in idxd_group_load_config()
    [all...]
irq.c
     49  struct idxd_wq *wq = idxd->wqs[i];  in idxd_device_reinit()
    336  struct idxd_wq *wq = idxd->wqs[entry_head->wq_idx];  in process_evl_entry()
    417  struct idxd_wq *wq = idxd->wqs[id];  in idxd_misc_thread()
    425  struct idxd_wq *wq = idxd->wqs[i];  in idxd_misc_thread()
sysfs.c
    342  struct idxd_wq *wq = idxd->wqs[i];  in group_work_queues_show()
    721  struct idxd_wq *wq = idxd->wqs[i];  in total_claimed_wq_size()
   1454  struct idxd_wq *wq = idxd->wqs[i];  in clients_show()
   1758  kfree(idxd->wqs);  in idxd_conf_device_release()
   1841  wq = idxd->wqs[i];  in idxd_register_wq_devices()
   1852  wq = idxd->wqs[i];  in idxd_register_wq_devices()
   1857  wq = idxd->wqs[j];  in idxd_register_wq_devices()
   1897  device_unregister(wq_confdev(idxd->wqs[i]));  in idxd_register_devices()
   1908  struct idxd_wq *wq = idxd->wqs[i];  in idxd_unregister_devices()
idxd.h
    321  struct idxd_wq **wqs;  member
    462  return (idx == 0) ? &idxd->ie : &idxd->wqs[idx - 1]->ie;  in idxd_get_ie()
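idxd_get_ie() above encodes the MSIX layout: vector 0 is the device's own interrupt entry and vector N (N >= 1) belongs to wqs[N - 1]. A hedged sketch of the inverse lookup follows; the function name is hypothetical, and the struct idxd_irq_entry type is assumed from the surrounding driver.

static int example_ie_to_idx(struct idxd_device *idxd,
                             struct idxd_irq_entry *ie)
{
        int i;

        /* Vector 0 belongs to the device itself. */
        if (ie == &idxd->ie)
                return 0;
        /* Vector i + 1 belongs to WQ i. */
        for (i = 0; i < idxd->max_wqs; i++)
                if (ie == &idxd->wqs[i]->ie)
                        return i + 1;
        return -EINVAL;
}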
registers.h
    356  u64 wqs[4];  member
/kernel/linux/linux-5.10/drivers/infiniband/core/
uverbs_cmd.c
   3049  struct ib_wq **wqs = NULL;  in ib_uverbs_ex_create_rwq_ind_table()  local
   3082  wqs = kcalloc(num_wq_handles, sizeof(*wqs), GFP_KERNEL);  in ib_uverbs_ex_create_rwq_ind_table()
   3083  if (!wqs) {  in ib_uverbs_ex_create_rwq_ind_table()
   3097  wqs[num_read_wqs] = wq;  in ib_uverbs_ex_create_rwq_ind_table()
   3098  atomic_inc(&wqs[num_read_wqs]->usecnt);  in ib_uverbs_ex_create_rwq_ind_table()
   3114  init_attr.ind_tbl = wqs;  in ib_uverbs_ex_create_rwq_ind_table()
   3116  rwq_ind_tbl->ind_tbl = wqs;  in ib_uverbs_ex_create_rwq_ind_table()
   3129  rdma_lookup_put_uobject(&wqs[i]->uobject->uevent.uobject,  in ib_uverbs_ex_create_rwq_ind_table()
   3145  rdma_lookup_put_uobject(&wqs[  in ib_uverbs_ex_create_rwq_ind_table()
    [all...]
/kernel/linux/linux-6.6/drivers/infiniband/core/
uverbs_cmd.c
   3054  struct ib_wq **wqs = NULL;  in ib_uverbs_ex_create_rwq_ind_table()  local
   3087  wqs = kcalloc(num_wq_handles, sizeof(*wqs), GFP_KERNEL);  in ib_uverbs_ex_create_rwq_ind_table()
   3088  if (!wqs) {  in ib_uverbs_ex_create_rwq_ind_table()
   3102  wqs[num_read_wqs] = wq;  in ib_uverbs_ex_create_rwq_ind_table()
   3103  atomic_inc(&wqs[num_read_wqs]->usecnt);  in ib_uverbs_ex_create_rwq_ind_table()
   3119  init_attr.ind_tbl = wqs;  in ib_uverbs_ex_create_rwq_ind_table()
   3121  rwq_ind_tbl->ind_tbl = wqs;  in ib_uverbs_ex_create_rwq_ind_table()
   3134  rdma_lookup_put_uobject(&wqs[i]->uobject->uevent.uobject,  in ib_uverbs_ex_create_rwq_ind_table()
   3150  rdma_lookup_put_uobject(&wqs[  in ib_uverbs_ex_create_rwq_ind_table()
    [all...]
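Both kernel versions above follow the same acquire-all-or-release-all shape in ib_uverbs_ex_create_rwq_ind_table(): allocate the wqs pointer array, look up and pin (atomic_inc(&wq->usecnt)) every WQ handle, and on any failure put back exactly the uobjects taken so far before freeing the array. A condensed sketch of that pattern follows; example_get_wq()/example_put_wq() are hypothetical stand-ins for the uverbs uobject lookup/put calls, which take more arguments in the real code.

static struct ib_wq **example_collect_wqs(u32 *handles, int num_wq_handles)
{
        struct ib_wq **wqs;
        int i;

        wqs = kcalloc(num_wq_handles, sizeof(*wqs), GFP_KERNEL);
        if (!wqs)
                return ERR_PTR(-ENOMEM);

        for (i = 0; i < num_wq_handles; i++) {
                wqs[i] = example_get_wq(handles[i]);
                if (IS_ERR(wqs[i]))
                        goto err_put;
                /* Pin the WQ so it cannot be destroyed while tabled. */
                atomic_inc(&wqs[i]->usecnt);
        }
        return wqs;

err_put:
        /* Undo exactly the references taken so far. */
        while (--i >= 0) {
                atomic_dec(&wqs[i]->usecnt);
                example_put_wq(wqs[i]);
        }
        kfree(wqs);
        return ERR_PTR(-EINVAL);
}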