/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlxbf_gige/
    mlxbf_gige_rx.c
         84  size_t wq_size, cq_size;                                     in mlxbf_gige_rx_init()  (local)
         94  wq_size = MLXBF_GIGE_RX_WQE_SZ * priv->rx_q_entries;         in mlxbf_gige_rx_init()
         95  priv->rx_wqe_base = dma_alloc_coherent(priv->dev, wq_size,   in mlxbf_gige_rx_init()
        170  dma_free_coherent(priv->dev, wq_size,                        in mlxbf_gige_rx_init()
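The mlxbf_gige hits show the usual DMA ring sizing pattern: total work-queue bytes equal the per-WQE size times the number of RX entries, with the whole region obtained from dma_alloc_coherent() and released with dma_free_coherent(). Below is a minimal userspace sketch of that arithmetic; the constants are assumed example values (not the driver's), and calloc()/free() stand in for the coherent DMA allocator.

/* Hypothetical userspace sketch of the "wq_size = WQE size * entry count"
 * pattern from mlxbf_gige_rx_init(); calloc() stands in for
 * dma_alloc_coherent(), and both constants are illustrative, not the
 * driver's real values. */
#include <stdio.h>
#include <stdlib.h>

#define EXAMPLE_RX_WQE_SZ    8     /* assumed per-WQE size in bytes */
#define EXAMPLE_RX_Q_ENTRIES 128   /* assumed ring depth */

int main(void)
{
	size_t wq_size = (size_t)EXAMPLE_RX_WQE_SZ * EXAMPLE_RX_Q_ENTRIES;
	void *rx_wqe_base = calloc(1, wq_size);   /* driver uses dma_alloc_coherent() */

	if (!rx_wqe_base)
		return 1;

	printf("RX WQ: %d entries x %d bytes = %zu bytes\n",
	       EXAMPLE_RX_Q_ENTRIES, EXAMPLE_RX_WQE_SZ, wq_size);

	free(rx_wqe_base);                        /* driver uses dma_free_coherent() */
	return 0;
}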
/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/
    hinic_hw_io.c
        378  size_t qps_size, wq_size, db_size;                                  in hinic_io_create_qps()  (local)
        387  wq_size = num_qps * sizeof(*func_to_io->sq_wq);                     in hinic_io_create_qps()
        388  func_to_io->sq_wq = devm_kzalloc(&pdev->dev, wq_size, GFP_KERNEL);  in hinic_io_create_qps()
        394  wq_size = num_qps * sizeof(*func_to_io->rq_wq);                     in hinic_io_create_qps()
        395  func_to_io->rq_wq = devm_kzalloc(&pdev->dev, wq_size, GFP_KERNEL);  in hinic_io_create_qps()
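The hinic hit sizes two arrays the same way, one element per queue pair: wq_size = num_qps * sizeof(element), allocated zeroed for the SQ and RQ sides. A small sketch of that pattern follows; struct example_wq, the QP count, and the use of calloc() in place of devm_kzalloc() are illustrative assumptions.

/* Userspace sketch of the hinic_io_create_qps() sizing pattern: one array
 * element per queue pair, wq_size = num_qps * sizeof(element).  The struct
 * and QP count are stand-ins; calloc() replaces devm_kzalloc(). */
#include <stdio.h>
#include <stdlib.h>

struct example_wq {            /* stand-in for the driver's per-queue struct */
	unsigned int depth;
	unsigned int wqebb_size;
};

int main(void)
{
	unsigned int num_qps = 16;                       /* assumed QP count */
	size_t wq_size = num_qps * sizeof(struct example_wq);
	struct example_wq *sq_wq = calloc(num_qps, sizeof(*sq_wq));
	struct example_wq *rq_wq = calloc(num_qps, sizeof(*rq_wq));

	if (!sq_wq || !rq_wq) {
		free(sq_wq);
		free(rq_wq);
		return 1;
	}

	printf("per-direction WQ array: %u QPs -> %zu bytes\n", num_qps, wq_size);

	free(rq_wq);
	free(sq_wq);
	return 0;
}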
/kernel/linux/linux-5.10/include/uapi/rdma/
    i40iw-abi.h
         53  __u32 wq_size;  /* size of the WQs (sq+rq) allocated to the mmaped area */  (member)
/kernel/linux/patches/linux-5.10/prebuilts/usr/include/rdma/
    i40iw-abi.h
         31  __u32 wq_size;  (member)
/kernel/linux/patches/linux-4.19/prebuilts/usr/include/rdma/
    i40iw-abi.h
         18  __u32 wq_size;  (member)
    nes-abi.h
         19  __u32 wq_size;  (member)
/kernel/linux/patches/linux-6.6/prebuilts/usr/include/rdma/
    i40iw-abi.h
         31  __u32 wq_size;  (member)
/kernel/linux/linux-6.6/include/uapi/rdma/
    irdma-abi.h
         40  __u32 wq_size;  /* size of the WQs (SQ+RQ) in the mmaped area */  (member)
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/gt/uc/
    intel_guc_fwif.h
        235  u32 wq_size;  (member)
        260  u32 wq_size;  (member)
    intel_guc_submission.c
       2304  action[len++] = info->wq_size;         in __guc_action_register_multi_lrc_v70()
       2355  info->wq_size,                         in __guc_action_register_context_v70()
       2631  desc->wq_size = WQ_SIZE;               in prepare_context_registration_info_v69()
       2637  pdesc->wq_size_bytes = desc->wq_size;  in prepare_context_registration_info_v69()
       2708  info->wq_size = WQ_SIZE;               in prepare_context_registration_info_v70()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/
    params.c
        888  int wq_size = BIT(mlx5e_mpwqe_get_log_rq_size(mdev, params, xsk));          in mlx5e_shampo_get_log_cq_size()  (local)
        894  return order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1)); in mlx5e_shampo_get_log_cq_size()
       1120  int wq_size = BIT(MLX5_GET(wq, wqc, log_wq_sz));                            in mlx5e_shampo_hd_per_wq()  (local)
       1124  hd_per_wq = roundup_pow_of_two(hd_per_wqe * wq_size);                       in mlx5e_shampo_hd_per_wq()
       1134  int wq_size = BIT(MLX5_GET(wq, wqc, log_wq_sz));                            in mlx5e_shampo_icosq_sz()  (local)
       1144  wqebbs *= wq_size;                                                          in mlx5e_shampo_icosq_sz()
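The SHAMPO hits derive a completion-queue size from the RQ size: the WQE count (wq_size = 2^log_rq_sz) is scaled by how many reservations fit in a WQE and how many completions each reservation can produce, and the result is rounded up to the next power-of-two log. The standalone sketch below reproduces that arithmetic with a local ceil-log2 helper in place of the kernel's order_base_2(); every numeric input is an assumed example value, not the driver's default.

/* Standalone sketch of the mlx5e_shampo_get_log_cq_size() arithmetic:
 * log2(CQ size) = order_base_2((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1)).
 * All numeric values here are assumed examples. */
#include <stdio.h>

/* ceil(log2(n)) for n >= 1, mirroring the kernel's order_base_2() */
static unsigned int order_base_2_ul(unsigned long n)
{
	unsigned int order = 0;

	while ((1UL << order) < n)
		order++;
	return order;
}

int main(void)
{
	unsigned int log_rq_sz = 10;                 /* assumed log2 of RQ WQE count */
	unsigned long wq_size = 1UL << log_rq_sz;    /* BIT(log_rq_sz) */
	unsigned long wqe_size = 1UL << 18;          /* assumed bytes per MPWQE */
	unsigned long rsrv_size = 1UL << 16;         /* assumed bytes per reservation */
	unsigned long pkt_per_rsrv = 4;              /* assumed packets per reservation */

	unsigned int log_cq_size =
		order_base_2_ul((wqe_size / rsrv_size) * wq_size * (pkt_per_rsrv + 1));

	printf("log2(CQ size) = %u (%lu entries)\n", log_cq_size, 1UL << log_cq_size);
	return 0;
}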
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
    en.h
        179  static inline u16 mlx5_min_rx_wqes(int wq_type, u32 wq_size)  in mlx5_min_rx_wqes()  (argument)
        184  wq_size / 2);                                                 in mlx5_min_rx_wqes()
        187  wq_size / 2);                                                 in mlx5_min_rx_wqes()
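From the fragments visible here, mlx5_min_rx_wqes() clamps a per-WQ-type default to half the ring size (wq_size / 2). The sketch below reproduces that shape with hypothetical WQ-type values and default constants; the real names and numbers live in en.h.

/* Hedged sketch of the mlx5_min_rx_wqes() shape seen in en.h: pick a
 * per-WQ-type default and clamp it to wq_size / 2.  The enum values and
 * defaults are assumptions for illustration. */
#include <stdio.h>
#include <stdint.h>

enum example_wq_type {                  /* stand-ins for the mlx5 WQ types */
	EXAMPLE_WQ_TYPE_CYCLIC,
	EXAMPLE_WQ_TYPE_STRIDING_RQ,
};

static uint16_t example_min_rx_wqes(int wq_type, uint32_t wq_size)
{
	uint32_t def;

	switch (wq_type) {
	case EXAMPLE_WQ_TYPE_STRIDING_RQ:
		def = 0x2;                      /* assumed multi-packet-WQE default */
		break;
	default:
		def = 0x80;                     /* assumed default */
		break;
	}
	return (uint16_t)(def < wq_size / 2 ? def : wq_size / 2);
}

int main(void)
{
	printf("min RX WQEs (cyclic, 64-entry ring)  = %u\n",
	       (unsigned)example_min_rx_wqes(EXAMPLE_WQ_TYPE_CYCLIC, 64));
	printf("min RX WQEs (striding, 1024 entries) = %u\n",
	       (unsigned)example_min_rx_wqes(EXAMPLE_WQ_TYPE_STRIDING_RQ, 1024));
	return 0;
}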
/kernel/linux/linux-5.10/drivers/dma/idxd/
    registers.h
        294  u16 wq_size;  (member)
    sysfs.c
        894  int wq_size = 0;      in total_claimed_wq_size()  (local)
        899  wq_size += wq->size;  in total_claimed_wq_size()
        902  return wq_size;       in total_claimed_wq_size()
    device.c
        578  wq->wqcfg->wq_size = wq->size;  in idxd_wq_config_write()
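The sysfs.c hits in this group walk the device's work queues and sum the entries each has claimed; device.c then programs the per-queue wq_size from wq->size. A standalone sketch of that accounting, over a hypothetical 4-WQ device, follows.

/* Sketch of the total_claimed_wq_size() accounting from idxd's sysfs.c:
 * walk the device's work queues and sum the size each one has claimed.
 * The structures and the 4-WQ device below are illustrative assumptions. */
#include <stdio.h>

struct example_wq {
	int size;                        /* WQ entries claimed by this queue */
};

struct example_idxd_device {
	int max_wqs;
	struct example_wq *wqs;
};

static int total_claimed_wq_size(struct example_idxd_device *idxd)
{
	int wq_size = 0;
	int i;

	for (i = 0; i < idxd->max_wqs; i++)
		wq_size += idxd->wqs[i].size;
	return wq_size;
}

int main(void)
{
	struct example_wq wqs[4] = { { 32 }, { 0 }, { 64 }, { 16 } };
	struct example_idxd_device idxd = { .max_wqs = 4, .wqs = wqs };

	/* e.g. compare the total against the device's overall WQ capacity
	 * before accepting a new per-queue size */
	printf("claimed WQ entries: %d\n", total_claimed_wq_size(&idxd));
	return 0;
}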
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/gt/uc/
    intel_guc_fwif.h
        198  u32 wq_size;  (member)
    intel_guc_submission.c
        158  desc->wq_size = GUC_WQ_SIZE;  in guc_stage_desc_init()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
    en.h
        179  static inline u16 mlx5_min_rx_wqes(int wq_type, u32 wq_size)  in mlx5_min_rx_wqes()  (argument)
        184  wq_size / 2);                                                 in mlx5_min_rx_wqes()
        187  wq_size / 2);                                                 in mlx5_min_rx_wqes()
/kernel/linux/linux-6.6/include/net/mana/
    mana.h
        560  u32 wq_size;  (member)
/kernel/linux/linux-6.6/drivers/dma/idxd/
    registers.h
        364  u16 wq_size;  (member)
    sysfs.c
        718  int wq_size = 0;      in total_claimed_wq_size()  (local)
        723  wq_size += wq->size;  in total_claimed_wq_size()
        726  return wq_size;       in total_claimed_wq_size()
    device.c
        929  wq->wqcfg->wq_size = wq->size;  in idxd_wq_config_write()
       1137  wq->size = wq->wqcfg->wq_size;  in idxd_wq_load_config()
/kernel/linux/linux-6.6/drivers/net/ethernet/microsoft/mana/
    gdma_main.c
       1008  u32 wq_size = wq->queue_size;        in mana_gd_wq_avail_space()  (local)
       1010  WARN_ON_ONCE(used_space > wq_size);  in mana_gd_wq_avail_space()
       1012  return wq_size - used_space;         in mana_gd_wq_avail_space()
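mana_gd_wq_avail_space() reports free room as the queue size minus the space currently in use, warning if the accounting ever exceeds the queue size. The sketch below shows the same computation; deriving used_space from free-running head/tail offsets is an assumption for illustration, since the hit only shows the subtraction and the sanity check.

/* Sketch of the mana_gd_wq_avail_space() idea: available space is the
 * queue size minus what is currently in flight.  Computing used_space
 * from head/tail offsets with unsigned wrap-around is an assumption. */
#include <assert.h>
#include <stdio.h>
#include <stdint.h>

struct example_gdma_queue {
	uint32_t queue_size;   /* total bytes in the ring */
	uint32_t head;         /* producer offset, free-running */
	uint32_t tail;         /* consumer offset, free-running */
};

static uint32_t example_wq_avail_space(const struct example_gdma_queue *wq)
{
	uint32_t used_space = wq->head - wq->tail;   /* wraps correctly for u32 */
	uint32_t wq_size = wq->queue_size;

	assert(used_space <= wq_size);               /* WARN_ON_ONCE() in the driver */
	return wq_size - used_space;
}

int main(void)
{
	struct example_gdma_queue wq = { .queue_size = 4096, .head = 5000, .tail = 4000 };

	printf("available: %u of %u bytes\n",
	       (unsigned)example_wq_avail_space(&wq), (unsigned)wq.queue_size);
	return 0;
}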
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
    qp.c
        355  int wq_size;                                                     in set_rq_size()  (local)
        390  wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size;       in set_rq_size()
        391  wq_size = max_t(int, wq_size, MLX5_SEND_WQE_BB);                 in set_rq_size()
        392  qp->rq.wqe_cnt = wq_size / wqe_size;                             in set_rq_size()
        513  int wq_size;                                                     in calc_sq_size()  (local)
        533  wq_size = roundup_pow_of_two(attr->cap.max_send_wr * wqe_size);  in calc_sq_size()
        534  qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB;                     in calc_sq_size()
        548  qp->sq.max_post = wq_size / wqe_size;                            in calc_sq_size()
        551  return wq_size;                                                  in calc_sq_size()
        [all...]
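Both the 5.10 and 6.6 versions of qp.c size the queues the same way: round the requested work-request capacity up to a power of two, then derive the WQE count and the post limit from the resulting buffer size. The standalone sketch below reproduces that arithmetic with a local roundup_pow_of_two(); the 64-byte basic block matches MLX5_SEND_WQE_BB, while the wqe_size values and work-request counts are example inputs.

/* Standalone sketch of the set_rq_size()/calc_sq_size() arithmetic from
 * mlx5's qp.c: round the requested work-request count up to a power of
 * two, then derive WQE count and max_post from the buffer size.  The
 * wqe_size values and WR counts are example inputs. */
#include <stdio.h>

#define EXAMPLE_SEND_WQE_BB 64          /* matches MLX5_SEND_WQE_BB */

static unsigned long roundup_pow_of_two_ul(unsigned long n)
{
	unsigned long r = 1;

	while (r < n)
		r <<= 1;
	return r;
}

int main(void)
{
	/* RQ: wq_size = roundup_pow_of_two(max_recv_wr) * wqe_size */
	unsigned long max_recv_wr = 100, rq_wqe_size = 64;
	unsigned long rq_size = roundup_pow_of_two_ul(max_recv_wr) * rq_wqe_size;
	unsigned long rq_wqe_cnt = rq_size / rq_wqe_size;

	/* SQ: wq_size = roundup_pow_of_two(max_send_wr * wqe_size) */
	unsigned long max_send_wr = 100, sq_wqe_size = 192;
	unsigned long sq_size = roundup_pow_of_two_ul(max_send_wr * sq_wqe_size);
	unsigned long sq_wqe_cnt = sq_size / EXAMPLE_SEND_WQE_BB;
	unsigned long sq_max_post = sq_size / sq_wqe_size;

	printf("RQ: %lu bytes, %lu WQEs\n", rq_size, rq_wqe_cnt);
	printf("SQ: %lu bytes, %lu basic blocks, max_post %lu\n",
	       sq_size, sq_wqe_cnt, sq_max_post);
	return 0;
}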
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
    qp.c
        437  int wq_size;                                                     in set_rq_size()  (local)
        472  wq_size = roundup_pow_of_two(cap->max_recv_wr) * wqe_size;       in set_rq_size()
        473  wq_size = max_t(int, wq_size, MLX5_SEND_WQE_BB);                 in set_rq_size()
        474  qp->rq.wqe_cnt = wq_size / wqe_size;                             in set_rq_size()
        595  int wq_size;                                                     in calc_sq_size()  (local)
        615  wq_size = roundup_pow_of_two(attr->cap.max_send_wr * wqe_size);  in calc_sq_size()
        616  qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB;                     in calc_sq_size()
        630  qp->sq.max_post = wq_size / wqe_size;                            in calc_sq_size()
        633  return wq_size;                                                  in calc_sq_size()
        [all...]