/kernel/linux/linux-5.10/drivers/mailbox/ti-msgmgr.c
     53: * @queue_count: Number of Queues
     73: u8 queue_count;  (member)
    600: if (qinst->queue_id > d->queue_count) {  in ti_msgmgr_queue_setup()
    602: idx, qinst->queue_id, d->queue_count);  in ti_msgmgr_queue_setup()
    676: .queue_count = 64,
    691: .queue_count = 190,
    723: int queue_count;  in ti_msgmgr_probe() (local)
    771: queue_count = desc->num_valid_queues;  in ti_msgmgr_probe()
    772: if (!queue_count || queue_count > des  in ti_msgmgr_probe()
    [all...]

/kernel/linux/linux-6.6/drivers/mailbox/ti-msgmgr.c
     54: * @queue_count: Number of Queues
     74: u8 queue_count;  (member)
    650: if (qinst->queue_id > d->queue_count) {  in ti_msgmgr_queue_setup()
    652: idx, qinst->queue_id, d->queue_count);  in ti_msgmgr_queue_setup()
    774: .queue_count = 64,
    789: .queue_count = 190,
    820: int queue_count;  in ti_msgmgr_probe() (local)
    865: queue_count = desc->num_valid_queues;  in ti_msgmgr_probe()
    866: if (!queue_count || queue_count > des  in ti_msgmgr_probe()
    [all...]

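In both trees the ti-msgmgr hits show the same shape: each SoC description hard-codes a queue_count (64 or 190), ti_msgmgr_queue_setup() rejects a queue whose ID exceeds that count, and probe validates the count pulled from the descriptor. A minimal standalone sketch of that range check follows; the struct and function names are simplified stand-ins rather than the driver's real types, and only the comparison mirrors the hits.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for the per-SoC descriptor; not the driver's real type. */
    struct msgmgr_desc {
            uint8_t queue_count;   /* number of queues this message manager exposes */
    };

    /* Mirrors the "qinst->queue_id > d->queue_count" rejection seen in the hits:
     * a queue is set up only if its ID does not exceed the advertised count. */
    static bool queue_id_in_range(const struct msgmgr_desc *d, unsigned int queue_id)
    {
            return queue_id <= d->queue_count;
    }

    int main(void)
    {
            struct msgmgr_desc d = { .queue_count = 64 };   /* value taken from the hits */

            printf("queue 10: %s\n", queue_id_in_range(&d, 10) ? "ok" : "rejected");
            printf("queue 90: %s\n", queue_id_in_range(&d, 90) ? "ok" : "rejected");
            return 0;
    }
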
/kernel/linux/linux-5.10/drivers/s390/crypto/ap_queue.c
    144: aq->queue_count = max_t(int, 0, aq->queue_count - 1);  in ap_sm_recv()
    145: if (!status.queue_empty && !aq->queue_count)  in ap_sm_recv()
    146: aq->queue_count++;  in ap_sm_recv()
    147: if (aq->queue_count > 0)  in ap_sm_recv()
    166: if (!status.queue_empty || aq->queue_count <= 0)  in ap_sm_recv()
    169: aq->queue_count = 0;  in ap_sm_recv()
    195: if (aq->queue_count > 0) {  in ap_sm_read()
    202: if (aq->queue_count > 0)  in ap_sm_read()
    245: aq->queue_count  in ap_sm_write()
    [all...]

/kernel/linux/linux-6.6/drivers/s390/crypto/ap_queue.c
    135: aq->queue_count = max_t(int, 0, aq->queue_count - 1);  in ap_sm_recv()
    136: if (!status.queue_empty && !aq->queue_count)  in ap_sm_recv()
    137: aq->queue_count++;  in ap_sm_recv()
    138: if (aq->queue_count > 0)  in ap_sm_recv()
    162: if (!status.queue_empty || aq->queue_count <= 0)  in ap_sm_recv()
    165: aq->queue_count = 0;  in ap_sm_recv()
    193: if (aq->queue_count > 0) {  in ap_sm_read()
    200: if (aq->queue_count > 0)  in ap_sm_read()
    239: aq->queue_count  in ap_sm_write()
    [all...]

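The s390 ap_queue hits are the AP bus state machine keeping aq->queue_count (requests believed pending on the adapter) in sync with the hardware status word: the receive path decrements it but clamps at zero via max_t(), and bumps it back to one if the status says the queue is not actually empty. A standalone sketch of that clamp-and-resync bookkeeping, with simplified names and a plain ternary in place of max_t(), is below.

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-in for the AP queue bookkeeping; not the real struct ap_queue. */
    struct ap_queue_state {
            int queue_count;   /* requests believed to be pending on the queue */
    };

    /*
     * Mirrors the pattern in ap_sm_recv(): decrement but never go negative,
     * then resync with what the hardware status actually reports.
     */
    static void account_reply(struct ap_queue_state *aq, bool hw_queue_empty)
    {
            aq->queue_count = aq->queue_count - 1 > 0 ? aq->queue_count - 1 : 0;
            if (!hw_queue_empty && !aq->queue_count)
                    aq->queue_count++;      /* hardware still holds a request */
    }

    int main(void)
    {
            struct ap_queue_state aq = { .queue_count = 1 };

            account_reply(&aq, true);             /* reply consumed, queue now empty */
            printf("count=%d\n", aq.queue_count); /* 0 */

            account_reply(&aq, false);            /* count already 0, but queue not empty */
            printf("count=%d\n", aq.queue_count); /* corrected back to 1 */
            return 0;
    }
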
/kernel/linux/linux-5.10/drivers/nvme/target/loop.c
    222: BUG_ON(hctx_idx >= ctrl->ctrl.queue_count);  in nvme_loop_init_hctx()
    297: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_loop_destroy_io_queues()
    301: ctrl->ctrl.queue_count = 1;  in nvme_loop_destroy_io_queues()
    323: ctrl->ctrl.queue_count++;  in nvme_loop_init_io_queues()
    337: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_loop_connect_io_queues()
    367: ctrl->ctrl.queue_count = 1;  in nvme_loop_configure_admin_queue()
    423: if (ctrl->ctrl.queue_count > 1) {  in nvme_loop_shutdown_ctrl()
    488: ctrl->ctrl.queue_count - 1);  in nvme_loop_reset_ctrl_work()
    536: ctrl->tag_set.nr_hw_queues = ctrl->ctrl.queue_count - 1;  in nvme_loop_create_io_queues()

/kernel/linux/linux-6.6/drivers/nvme/target/loop.c
    224: BUG_ON(hctx_idx >= ctrl->ctrl.queue_count);  in nvme_loop_init_hctx()
    295: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_loop_destroy_io_queues()
    299: ctrl->ctrl.queue_count = 1;  in nvme_loop_destroy_io_queues()
    321: ctrl->ctrl.queue_count++;  in nvme_loop_init_io_queues()
    335: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_loop_connect_io_queues()
    353: ctrl->ctrl.queue_count = 1;  in nvme_loop_configure_admin_queue()
    396: if (ctrl->ctrl.queue_count > 1) {  in nvme_loop_shutdown_ctrl()
    457: ctrl->ctrl.queue_count - 1);  in nvme_loop_reset_ctrl_work()

/kernel/linux/linux-6.6/drivers/net/ethernet/freescale/dpaa2/dpaa2-ethtool.c
    913: int queue_count = dpaa2_eth_queue_count(priv);  in dpaa2_eth_get_channels() (local)
    915: channels->max_rx = queue_count;  in dpaa2_eth_get_channels()
    916: channels->max_tx = queue_count;  in dpaa2_eth_get_channels()
    917: channels->rx_count = queue_count;  in dpaa2_eth_get_channels()
    918: channels->tx_count = queue_count;  in dpaa2_eth_get_channels()
    921: channels->max_other = queue_count + 1;  in dpaa2_eth_get_channels()
    926: channels->other_count = queue_count + 1;  in dpaa2_eth_get_channels()

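The dpaa2 hit is an ethtool .get_channels callback that reports the same queue count as both the maximum and current RX/TX channel values, plus one extra "other" channel. The sketch below reproduces that shape; dpaa2_eth_queue_count() and the +1 accounting come straight from the hits, while the surrounding wiring (the driver-private header and struct, ops registration) is assumed and not shown.

    #include <linux/ethtool.h>
    #include <linux/netdevice.h>
    /* #include "dpaa2-eth.h"  -- driver-private header providing struct dpaa2_eth_priv
     * and dpaa2_eth_queue_count(); header name assumed, not shown in the hits. */

    /* Report symmetric RX/TX channel limits and counts from the queue count,
     * plus one channel beyond the data queues, matching the shape of the hits. */
    static void sketch_get_channels(struct net_device *net_dev,
                                    struct ethtool_channels *channels)
    {
            struct dpaa2_eth_priv *priv = netdev_priv(net_dev);
            int queue_count = dpaa2_eth_queue_count(priv);

            channels->max_rx = queue_count;
            channels->max_tx = queue_count;
            channels->rx_count = queue_count;
            channels->tx_count = queue_count;

            /* one extra "other" channel, as in the hits */
            channels->max_other = queue_count + 1;
            channels->other_count = queue_count + 1;
    }
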
/kernel/linux/linux-5.10/drivers/nvme/host/fc.c
   2290: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_fc_free_io_queues()
   2311: struct nvme_fc_queue *queue = &ctrl->queues[ctrl->ctrl.queue_count - 1];  in nvme_fc_delete_hw_io_queues()
   2314: for (i = ctrl->ctrl.queue_count - 1; i >= 1; i--, queue--)  in nvme_fc_delete_hw_io_queues()
   2324: for (i = 1; i < ctrl->ctrl.queue_count; i++, queue++) {  in nvme_fc_create_hw_io_queues()
   2343: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_fc_connect_io_queues()
   2363: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_fc_init_io_queues()
   2469: if (ctrl->ctrl.queue_count > 1) {  in __nvme_fc_abort_outstanding_ios()
   2470: for (q = 1; q < ctrl->ctrl.queue_count; q++)  in __nvme_fc_abort_outstanding_ios()
   2487: if (ctrl->ctrl.queue_count > 1) {  in __nvme_fc_abort_outstanding_ios()
   2871: ctrl->ctrl.queue_count  in nvme_fc_create_io_queues()
    [all...]

/kernel/linux/linux-5.10/drivers/nvme/host/rdma.c
    327: BUG_ON(hctx_idx >= ctrl->ctrl.queue_count);  in nvme_rdma_init_hctx()
    670: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_rdma_free_io_queues()
    678: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_rdma_stop_io_queues()
    708: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_rdma_start_io_queues()
    747: ctrl->ctrl.queue_count = nr_io_queues + 1;  in nvme_rdma_alloc_io_queues()
    779: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_rdma_alloc_io_queues()
    829: set->nr_hw_queues = nctrl->queue_count - 1;  in nvme_rdma_alloc_tagset()
   1008: ctrl->ctrl.queue_count - 1);  in nvme_rdma_configure_io_queues()
   1049: if (ctrl->ctrl.queue_count > 1) {  in nvme_rdma_teardown_io_queues()
   1146: if (ctrl->ctrl.queue_count >  in nvme_rdma_setup_ctrl()
    [all...]

/kernel/linux/linux-5.10/drivers/nvme/host/tcp.c
   1658: set->nr_hw_queues = nctrl->queue_count - 1;  in nvme_tcp_alloc_tagset()
   1685: for (i = 1; i < ctrl->queue_count; i++)  in nvme_tcp_free_io_queues()
   1693: for (i = 1; i < ctrl->queue_count; i++)  in nvme_tcp_stop_io_queues()
   1701: for (i = 1; i < ctrl->queue_count; i++) {  in nvme_tcp_start_io_queues()
   1738: for (i = 1; i < ctrl->queue_count; i++) {  in __nvme_tcp_alloc_io_queues()
   1816: ctrl->queue_count = nr_io_queues + 1;  in nvme_tcp_alloc_io_queues()
   1875: ctrl->queue_count - 1);  in nvme_tcp_configure_io_queues()
   1991: if (ctrl->queue_count <= 1)  in nvme_tcp_teardown_io_queues()
   2053: if (ctrl->queue_count > 1) {  in nvme_tcp_setup_ctrl()
   2076: if (ctrl->queue_count >  in nvme_tcp_setup_ctrl()
    [all...]

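Taken together, the NVMe fabrics hits (the loop target above and the FC/RDMA/TCP hosts here and again in the 6.6 listings below) all follow one convention: ctrl->queue_count counts the admin queue at index 0 plus the I/O queues, so it is set to nr_io_queues + 1, loops over I/O queues run from index 1 to queue_count - 1, the blk-mq tag set gets queue_count - 1 hardware queues, and "queue_count > 1" asks whether any I/O queues exist at all. A small standalone sketch of that indexing convention, using generic names rather than the real nvme_ctrl layout, is below.

    #include <stdio.h>

    /* Generic stand-in for an NVMe-over-fabrics controller (hypothetical type). */
    struct fabrics_ctrl {
            unsigned int queue_count;   /* admin queue + I/O queues */
    };

    static void set_io_queues(struct fabrics_ctrl *ctrl, unsigned int nr_io_queues)
    {
            /* queue 0 is the admin queue, so the total is nr_io_queues + 1 */
            ctrl->queue_count = nr_io_queues + 1;
    }

    static void connect_io_queues(struct fabrics_ctrl *ctrl)
    {
            /* I/O queues occupy indices 1..queue_count-1 */
            for (unsigned int i = 1; i < ctrl->queue_count; i++)
                    printf("connect I/O queue %u\n", i);
    }

    int main(void)
    {
            struct fabrics_ctrl ctrl;

            set_io_queues(&ctrl, 4);

            if (ctrl.queue_count > 1)       /* "do we have I/O queues?" test */
                    connect_io_queues(&ctrl);

            /* blk-mq hardware queues exclude the admin queue */
            printf("nr_hw_queues = %u\n", ctrl.queue_count - 1);
            return 0;
    }
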
/kernel/linux/linux-5.10/drivers/gpu/drm/amd/amdkfd/kfd_packet_manager.c
     44: unsigned int process_count, queue_count, compute_queue_count, gws_queue_count;  in pm_calc_rlib_size() (local)
     50: queue_count = pm->dqm->active_queue_count;  in pm_calc_rlib_size()
     74: queue_count * map_queue_size;  in pm_calc_rlib_size()

/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdkfd/kfd_packet_manager.c
     45: unsigned int process_count, queue_count, compute_queue_count, gws_queue_count;  in pm_calc_rlib_size() (local)
     51: queue_count = pm->dqm->active_queue_count;  in pm_calc_rlib_size()
     75: queue_count * map_queue_size;  in pm_calc_rlib_size()

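Both amdkfd hits sit in pm_calc_rlib_size(), which sizes the runlist indirect buffer from the number of processes and active queues; the only term visible in the snippets is queue_count * map_queue_size, with queue_count taken from dqm->active_queue_count. The sketch below shows that count-times-packet-size style of calculation; the packet sizes and the process term are illustrative assumptions, not a reconstruction of the real formula.

    #include <stdio.h>

    /* Illustrative packet sizes in bytes (assumed values, not the real PM4 sizes). */
    #define MAP_PROCESS_SIZE  64u
    #define MAP_QUEUE_SIZE    56u

    /* Size a runlist buffer from process and active-queue counts.  Only the
     * queue_count * map_queue_size term appears in the hits; the process term
     * here is an assumption for illustration. */
    static unsigned int calc_runlist_bytes(unsigned int process_count,
                                           unsigned int queue_count)
    {
            return process_count * MAP_PROCESS_SIZE +
                   queue_count * MAP_QUEUE_SIZE;
    }

    int main(void)
    {
            printf("%u bytes\n", calc_runlist_bytes(2, 8));
            return 0;
    }
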
/kernel/linux/linux-6.6/drivers/nvme/host/fc.c
   2308: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_fc_free_io_queues()
   2329: struct nvme_fc_queue *queue = &ctrl->queues[ctrl->ctrl.queue_count - 1];  in nvme_fc_delete_hw_io_queues()
   2332: for (i = ctrl->ctrl.queue_count - 1; i >= 1; i--, queue--)  in nvme_fc_delete_hw_io_queues()
   2342: for (i = 1; i < ctrl->ctrl.queue_count; i++, queue++) {  in nvme_fc_create_hw_io_queues()
   2361: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_fc_connect_io_queues()
   2381: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_fc_init_io_queues()
   2482: if (ctrl->ctrl.queue_count > 1) {  in __nvme_fc_abort_outstanding_ios()
   2483: for (q = 1; q < ctrl->ctrl.queue_count; q++)  in __nvme_fc_abort_outstanding_ios()
   2500: if (ctrl->ctrl.queue_count > 1) {  in __nvme_fc_abort_outstanding_ios()
   2908: ctrl->ctrl.queue_count  in nvme_fc_create_io_queues()
    [all...]

/kernel/linux/linux-6.6/drivers/nvme/host/rdma.c
    325: BUG_ON(hctx_idx >= ctrl->ctrl.queue_count);  in nvme_rdma_init_hctx()
    664: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_rdma_free_io_queues()
    672: for (i = 1; i < ctrl->ctrl.queue_count; i++)  in nvme_rdma_stop_io_queues()
    733: ctrl->ctrl.queue_count = nr_io_queues + 1;  in nvme_rdma_alloc_io_queues()
    738: for (i = 1; i < ctrl->ctrl.queue_count; i++) {  in nvme_rdma_alloc_io_queues()
    883: nr_queues = min(ctrl->tag_set.nr_hw_queues + 1, ctrl->ctrl.queue_count);  in nvme_rdma_configure_io_queues()
    902: ctrl->ctrl.queue_count - 1);  in nvme_rdma_configure_io_queues()
    947: if (ctrl->ctrl.queue_count > 1) {  in nvme_rdma_teardown_io_queues()
   1050: if (ctrl->ctrl.queue_count > 1) {  in nvme_rdma_setup_ctrl()
   1076: if (ctrl->ctrl.queue_count >  in nvme_rdma_setup_ctrl()
    [all...]

/kernel/linux/linux-6.6/drivers/nvme/host/tcp.c
   1741: for (i = 1; i < ctrl->queue_count; i++)  in nvme_tcp_free_io_queues()
   1749: for (i = 1; i < ctrl->queue_count; i++)  in nvme_tcp_stop_io_queues()
   1795: for (i = 1; i < ctrl->queue_count; i++) {  in __nvme_tcp_alloc_io_queues()
   1826: ctrl->queue_count = nr_io_queues + 1;  in nvme_tcp_alloc_io_queues()
   1865: nr_queues = min(ctrl->tagset->nr_hw_queues + 1, ctrl->queue_count);  in nvme_tcp_configure_io_queues()
   1884: ctrl->queue_count - 1);  in nvme_tcp_configure_io_queues()
   1982: if (ctrl->queue_count <= 1)  in nvme_tcp_teardown_io_queues()
   2048: if (ctrl->queue_count > 1) {  in nvme_tcp_setup_ctrl()
   2073: if (ctrl->queue_count > 1) {  in nvme_tcp_setup_ctrl()
   2528: ctrl->ctrl.queue_count  in nvme_tcp_create_ctrl()
    [all...]

/kernel/linux/linux-5.10/drivers/net/ethernet/pensando/ionic/ionic_debugfs.c
     70: (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_TXQ]);  in ionic_debugfs_add_sizes()
     72: (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_RXQ]);  in ionic_debugfs_add_sizes()

/kernel/linux/linux-6.6/drivers/net/ethernet/pensando/ionic/ionic_debugfs.c
     70: (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_TXQ]);  in ionic_debugfs_add_sizes()
     72: (u32 *)&ionic->ident.lif.eth.config.queue_count[IONIC_QTYPE_RXQ]);  in ionic_debugfs_add_sizes()

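The ionic hits expose per-queue-type counts from the device identity (config.queue_count[IONIC_QTYPE_TXQ/RXQ]) through debugfs; the call itself is cut off in the snippets, but the cast to u32 * matches the kernel's debugfs u32 helper. A hedged sketch using debugfs_create_u32() with illustrative file names follows; the real driver points the values at the identity fields shown above.

    #include <linux/debugfs.h>

    /* Create read-only u32 entries for per-LIF TX/RX queue counts under an
     * existing debugfs directory.  File names here are illustrative only. */
    static void sketch_debugfs_add_sizes(struct dentry *dir,
                                         u32 *ntxqs, u32 *nrxqs)
    {
            debugfs_create_u32("tx_queue_count", 0400, dir, ntxqs);
            debugfs_create_u32("rx_queue_count", 0400, dir, nrxqs);
    }
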
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/rxe_queue.h
    136: static inline unsigned int queue_count(const struct rxe_queue *q)  (function)

/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/rxe_queue.c
    114: if (!queue_empty(q) && (num_elem < queue_count(q)))  in resize_finish()

/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/rxe_cq.c
     28: count = queue_count(cq->queue);  in rxe_cq_chk_attr()

/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/rxe_cq.c
     28: count = queue_count(cq->queue, QUEUE_TYPE_TO_CLIENT);  in rxe_cq_chk_attr()

/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/rxe_queue.c
    118: if (!queue_empty(q, q->type) && (num_elem < queue_count(q, type)))  in resize_finish()

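rxe's queue_count() is a helper on its shared work/completion-queue ring: the 5.10 version takes just the queue, while 6.6 adds a queue-type argument selecting which producer/consumer view to read. The underlying arithmetic is the usual power-of-two ring occupancy (producer index minus consumer index, masked by the ring's index mask). A standalone sketch of that calculation, simplified to a single view and ignoring memory-ordering concerns, is below.

    #include <stdint.h>
    #include <stdio.h>

    /* Minimal power-of-two ring, simplified from the rxe queue layout. */
    struct ring {
            uint32_t producer_index;
            uint32_t consumer_index;
            uint32_t index_mask;     /* num_elem - 1, with num_elem a power of two */
    };

    /* Occupancy: producer minus consumer, wrapped by the mask.  Unsigned
     * subtraction keeps the result correct across index wrap-around. */
    static uint32_t ring_count(const struct ring *q)
    {
            return (q->producer_index - q->consumer_index) & q->index_mask;
    }

    int main(void)
    {
            struct ring q = { .producer_index = 5, .consumer_index = 2,
                              .index_mask = 7 };

            printf("count=%u\n", ring_count(&q));   /* 3 */

            q.producer_index = 1;                    /* producer wrapped past 0 */
            q.consumer_index = 7;
            printf("count=%u\n", ring_count(&q));   /* 2 */
            return 0;
    }
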
/kernel/linux/linux-5.10/drivers/net/ethernet/intel/fm10k/fm10k_pf.c
    501: u16 glort, queue_count, vsi_count, pc_count;  in fm10k_configure_dglort_map_pf() (local)
    516: queue_count = BIT(dglort->rss_l + dglort->pc_l);  in fm10k_configure_dglort_map_pf()
    523: for (queue = 0; queue < queue_count; queue++, q_idx++) {  in fm10k_configure_dglort_map_pf()
    533: queue_count = BIT(dglort->queue_l + dglort->rss_l + dglort->vsi_l);  in fm10k_configure_dglort_map_pf()
    539: for (queue = 0; queue < queue_count; queue++) {  in fm10k_configure_dglort_map_pf()

/kernel/linux/linux-6.6/drivers/net/ethernet/intel/fm10k/fm10k_pf.c
    501: u16 glort, queue_count, vsi_count, pc_count;  in fm10k_configure_dglort_map_pf() (local)
    516: queue_count = BIT(dglort->rss_l + dglort->pc_l);  in fm10k_configure_dglort_map_pf()
    523: for (queue = 0; queue < queue_count; queue++, q_idx++) {  in fm10k_configure_dglort_map_pf()
    533: queue_count = BIT(dglort->queue_l + dglort->rss_l + dglort->vsi_l);  in fm10k_configure_dglort_map_pf()
    539: for (queue = 0; queue < queue_count; queue++) {  in fm10k_configure_dglort_map_pf()

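In fm10k_configure_dglort_map_pf() the queue counts are powers of two derived from bit-field widths: BIT(rss_l + pc_l) for one pass over the queue map and BIT(queue_l + rss_l + vsi_l) for the other. The standalone sketch below spells out that width-to-count derivation with the BIT() macro written explicitly; the field values are examples only.

    #include <stdint.h>
    #include <stdio.h>

    #define BIT(n)  (1u << (n))

    /* Widths (in bits) of the DGLORT sub-fields; the values below are examples. */
    struct dglort_cfg {
            uint8_t pc_l;     /* priority class bits */
            uint8_t rss_l;    /* RSS bits            */
            uint8_t vsi_l;    /* VSI bits            */
            uint8_t queue_l;  /* queue bits          */
    };

    int main(void)
    {
            struct dglort_cfg d = { .pc_l = 1, .rss_l = 3, .vsi_l = 2, .queue_l = 0 };

            /* counts are the powers of two spanned by the relevant field widths */
            uint16_t rss_pc_queues = BIT(d.rss_l + d.pc_l);
            uint16_t vsi_queues    = BIT(d.queue_l + d.rss_l + d.vsi_l);

            printf("rss/pc queue span: %u\n", rss_pc_queues);  /* 16 */
            printf("vsi queue span:    %u\n", vsi_queues);     /* 32 */
            return 0;
    }
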
/kernel/linux/linux-5.10/drivers/scsi/hisi_sas/hisi_sas_main.c
    677: int queue = i % hisi_hba->queue_count;  in hisi_sas_alloc_dev()
   2282: for (i = 0; i < hisi_hba->queue_count; i++) {  in hisi_sas_init_mem()
   2334: for (i = 0; i < hisi_hba->queue_count; i++) {  in hisi_sas_alloc()
   2550: &hisi_hba->queue_count)) {  in hisi_sas_get_fw_info()
   2710: for (i = 0; i < hisi_hba->queue_count; i++)  in hisi_sas_debugfs_snapshot_cq_reg()
   2722: for (i = 0; i < hisi_hba->queue_count; i++) {  in hisi_sas_debugfs_snapshot_dq_reg()
   3246: for (c = 0; c < hisi_hba->queue_count; c++) {  in hisi_sas_debugfs_create_files()
   3256: for (d = 0; d < hisi_hba->queue_count; d++) {  in hisi_sas_debugfs_create_files()
   3828: for (i = 0; i < hisi_hba->queue_count; i++)  in hisi_sas_debugfs_release()
   3831: for (i = 0; i < hisi_hba->queue_count;  in hisi_sas_debugfs_release()
    [all...]

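Most hisi_sas hits are plain iteration over all delivery/completion queues, but the first one is a placement decision: hisi_sas_alloc_dev() assigns each device a queue by taking its index modulo queue_count (the queue count itself is read during hisi_sas_get_fw_info(), though that call is truncated in the snippet). A tiny standalone sketch of that modulo round-robin assignment is below.

    #include <stdio.h>

    /* Spread consumers across a fixed pool of delivery queues by index modulo
     * the queue count, as in the "i % hisi_hba->queue_count" hit above. */
    static unsigned int pick_queue(unsigned int device_index, unsigned int queue_count)
    {
            return device_index % queue_count;
    }

    int main(void)
    {
            const unsigned int queue_count = 4;

            for (unsigned int i = 0; i < 6; i++)
                    printf("device %u -> queue %u\n", i, pick_queue(i, queue_count));
            /* devices 0..5 map to queues 0, 1, 2, 3, 0, 1 */
            return 0;
    }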