
Searched refs:mcq (Results 1 - 25 of 73) sorted by relevance


/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/
en_cq.c:98 cq->mcq.set_ci_db = cq->wqres.db.db; in mlx4_en_activate_cq()
99 cq->mcq.arm_db = cq->wqres.db.db + 1; in mlx4_en_activate_cq()
100 *cq->mcq.set_ci_db = 0; in mlx4_en_activate_cq()
101 *cq->mcq.arm_db = 0; in mlx4_en_activate_cq()
140 cq->mcq.usage = MLX4_RES_USAGE_DRIVER; in mlx4_en_activate_cq()
142 &mdev->priv_uar, cq->wqres.db.dma, &cq->mcq, in mlx4_en_activate_cq()
147 cq->mcq.event = mlx4_en_cq_event; in mlx4_en_activate_cq()
151 cq->mcq.comp = mlx4_en_tx_irq; in mlx4_en_activate_cq()
157 cq->mcq.comp = mlx4_en_rx_irq; in mlx4_en_activate_cq()
199 mlx4_cq_free(priv->mdev->dev, &cq->mcq); in mlx4_en_deactivate_cq()
[all...]
cq.c:63 struct mlx4_cq *mcq, *temp; in mlx4_cq_tasklet_cb() local
69 list_for_each_entry_safe(mcq, temp, &ctx->process_list, tasklet_ctx.list) { in mlx4_cq_tasklet_cb()
70 list_del_init(&mcq->tasklet_ctx.list); in mlx4_cq_tasklet_cb()
71 mcq->tasklet_ctx.comp(mcq); in mlx4_cq_tasklet_cb()
72 if (refcount_dec_and_test(&mcq->refcount)) in mlx4_cq_tasklet_cb()
73 complete(&mcq->free); in mlx4_cq_tasklet_cb()
en_rx.c:341 ring->cqn = priv->rx_cq[ring_ind]->mcq.cqn; in mlx4_en_activate_rx_rings()
692 index = cq->mcq.cons_index & ring->size_mask; in mlx4_en_process_rx_cq()
697 cq->mcq.cons_index & cq->size)) { in mlx4_en_process_rx_cq()
898 ++cq->mcq.cons_index; in mlx4_en_process_rx_cq()
899 index = (cq->mcq.cons_index) & ring->size_mask; in mlx4_en_process_rx_cq()
913 mlx4_cq_set_ci(&cq->mcq); in mlx4_en_process_rx_cq()
915 ring->cons = cq->mcq.cons_index; in mlx4_en_process_rx_cq()
925 void mlx4_en_rx_irq(struct mlx4_cq *mcq) in mlx4_en_rx_irq() argument
927 struct mlx4_en_cq *cq = container_of(mcq, struct mlx4_en_cq, mcq); in mlx4_en_rx_irq()
[all...]
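
Note on the en_cq.c hits above: mlx4_en_activate_cq() carves two consecutive 32-bit words out of one doorbell record, using the first as the consumer-index ("set CI") doorbell and the next as the arm doorbell, and clears both before the CQ goes live. A minimal user-space sketch of that layout, with hypothetical toy_* types standing in for the driver structures:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the driver's doorbell record and CQ. */
    struct toy_db {
        uint32_t *db;                 /* two consecutive 32-bit doorbell words */
    };

    struct toy_cq {
        volatile uint32_t *set_ci_db; /* consumer-index doorbell */
        volatile uint32_t *arm_db;    /* arm doorbell */
    };

    /* Same split as mlx4_en_activate_cq(): word 0 = set CI, word 1 = arm. */
    static void toy_activate_cq(struct toy_cq *cq, struct toy_db *wqres_db)
    {
        cq->set_ci_db = wqres_db->db;
        cq->arm_db    = wqres_db->db + 1;
        *cq->set_ci_db = 0;
        *cq->arm_db    = 0;
    }

    int main(void)
    {
        uint32_t words[2] = { 0xdead, 0xbeef };
        struct toy_db db = { .db = words };
        struct toy_cq cq;

        toy_activate_cq(&cq, &db);
        printf("set_ci_db=%u arm_db=%u\n",
               (unsigned)*cq.set_ci_db, (unsigned)*cq.arm_db);
        return 0;
    }
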
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx4/
en_cq.c:98 cq->mcq.set_ci_db = cq->wqres.db.db; in mlx4_en_activate_cq()
99 cq->mcq.arm_db = cq->wqres.db.db + 1; in mlx4_en_activate_cq()
100 *cq->mcq.set_ci_db = 0; in mlx4_en_activate_cq()
101 *cq->mcq.arm_db = 0; in mlx4_en_activate_cq()
138 cq->mcq.usage = MLX4_RES_USAGE_DRIVER; in mlx4_en_activate_cq()
140 &mdev->priv_uar, cq->wqres.db.dma, &cq->mcq, in mlx4_en_activate_cq()
145 cq->mcq.event = mlx4_en_cq_event; in mlx4_en_activate_cq()
149 cq->mcq.comp = mlx4_en_tx_irq; in mlx4_en_activate_cq()
154 cq->mcq.comp = mlx4_en_rx_irq; in mlx4_en_activate_cq()
196 mlx4_cq_free(priv->mdev->dev, &cq->mcq); in mlx4_en_deactivate_cq()
[all...]
cq.c:63 struct mlx4_cq *mcq, *temp; in mlx4_cq_tasklet_cb() local
69 list_for_each_entry_safe(mcq, temp, &ctx->process_list, tasklet_ctx.list) { in mlx4_cq_tasklet_cb()
70 list_del_init(&mcq->tasklet_ctx.list); in mlx4_cq_tasklet_cb()
71 mcq->tasklet_ctx.comp(mcq); in mlx4_cq_tasklet_cb()
72 if (refcount_dec_and_test(&mcq->refcount)) in mlx4_cq_tasklet_cb()
73 complete(&mcq->free); in mlx4_cq_tasklet_cb()
en_rx.c:341 ring->cqn = priv->rx_cq[ring_ind]->mcq.cqn; in mlx4_en_activate_rx_rings()
740 index = cq->mcq.cons_index & ring->size_mask; in mlx4_en_process_rx_cq()
745 cq->mcq.cons_index & cq->size)) { in mlx4_en_process_rx_cq()
958 ++cq->mcq.cons_index; in mlx4_en_process_rx_cq()
959 index = (cq->mcq.cons_index) & ring->size_mask; in mlx4_en_process_rx_cq()
974 mlx4_cq_set_ci(&cq->mcq); in mlx4_en_process_rx_cq()
976 ring->cons = cq->mcq.cons_index; in mlx4_en_process_rx_cq()
985 void mlx4_en_rx_irq(struct mlx4_cq *mcq) in mlx4_en_rx_irq() argument
987 struct mlx4_en_cq *cq = container_of(mcq, struct mlx4_en_cq, mcq); in mlx4_en_rx_irq()
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
cq.c:90 return get_sw_cqe(cq, cq->mcq.cons_index); in next_cqe_sw()
95 struct mlx4_ib_cq *mcq = to_mcq(cq); in mlx4_ib_modify_cq() local
98 return mlx4_cq_modify(dev->dev, &mcq->mcq, cq_count, cq_period); in mlx4_ib_modify_cq()
223 cq->mcq.usage = MLX4_RES_USAGE_USER_VERBS; in mlx4_ib_create_cq()
229 cq->mcq.set_ci_db = cq->db.db; in mlx4_ib_create_cq()
230 cq->mcq.arm_db = cq->db.db + 1; in mlx4_ib_create_cq()
231 *cq->mcq.set_ci_db = 0; in mlx4_ib_create_cq()
232 *cq->mcq.arm_db = 0; in mlx4_ib_create_cq()
241 cq->mcq in mlx4_ib_create_cq()
480 struct mlx4_ib_cq *mcq = to_mcq(cq); mlx4_ib_destroy_cq() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/
cq.c:90 return get_sw_cqe(cq, cq->mcq.cons_index); in next_cqe_sw()
95 struct mlx4_ib_cq *mcq = to_mcq(cq); in mlx4_ib_modify_cq() local
98 return mlx4_cq_modify(dev->dev, &mcq->mcq, cq_count, cq_period); in mlx4_ib_modify_cq()
223 cq->mcq.usage = MLX4_RES_USAGE_USER_VERBS; in mlx4_ib_create_cq()
229 cq->mcq.set_ci_db = cq->db.db; in mlx4_ib_create_cq()
230 cq->mcq.arm_db = cq->db.db + 1; in mlx4_ib_create_cq()
231 *cq->mcq.set_ci_db = 0; in mlx4_ib_create_cq()
232 *cq->mcq.arm_db = 0; in mlx4_ib_create_cq()
241 cq->mcq in mlx4_ib_create_cq()
480 struct mlx4_ib_cq *mcq = to_mcq(cq); mlx4_ib_destroy_cq() local
[all...]
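
Note on the mlx4_ib cq.c hits: to_mcq() and mlx4_en_rx_irq() both rely on container_of() to climb from the embedded core CQ (the mcq member) back to the driver structure that wraps it. A self-contained sketch of that pattern, using only standard C and toy struct names:

    #include <stddef.h>
    #include <stdio.h>

    /* Recover the enclosing structure from a pointer to one of its members,
     * like the kernel's container_of(). */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct core_cq {               /* stands in for mlx4_cq */
        int cqn;
    };

    struct driver_cq {             /* stands in for mlx4_en_cq / mlx4_ib_cq */
        int ring;
        struct core_cq mcq;        /* embedded core object */
    };

    /* The completion callback only sees the embedded core_cq and climbs
     * back up, as mlx4_en_rx_irq() does. */
    static void rx_irq(struct core_cq *mcq)
    {
        struct driver_cq *cq = container_of(mcq, struct driver_cq, mcq);
        printf("completion on cqn=%d, ring=%d\n", mcq->cqn, cq->ring);
    }

    int main(void)
    {
        struct driver_cq cq = { .ring = 3, .mcq = { .cqn = 42 } };

        rx_irq(&cq.mcq);           /* callers register only the embedded member */
        return 0;
    }
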
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/
en_dim.c:38 struct mlx5_core_dev *mdev, struct mlx5_core_cq *mcq) in mlx5e_complete_dim_work()
40 mlx5_core_modify_cq_moderation(mdev, mcq, moder.usec, moder.pkts); in mlx5e_complete_dim_work()
51 mlx5e_complete_dim_work(dim, cur_moder, rq->mdev, &rq->cq.mcq); in mlx5e_rx_dim_work()
61 mlx5e_complete_dim_work(dim, cur_moder, sq->cq.mdev, &sq->cq.mcq); in mlx5e_tx_dim_work()
37 mlx5e_complete_dim_work(struct dim *dim, struct dim_cq_moder moder, struct mlx5_core_dev *mdev, struct mlx5_core_cq *mcq) mlx5e_complete_dim_work() argument
en_txrx.c:220 void mlx5e_completion_event(struct mlx5_core_cq *mcq, struct mlx5_eqe *eqe) in mlx5e_completion_event() argument
222 struct mlx5e_cq *cq = container_of(mcq, struct mlx5e_cq, mcq); in mlx5e_completion_event()
229 void mlx5e_cq_error_event(struct mlx5_core_cq *mcq, enum mlx5_event event) in mlx5e_cq_error_event() argument
231 struct mlx5e_cq *cq = container_of(mcq, struct mlx5e_cq, mcq); in mlx5e_cq_error_event()
236 __func__, mcq->cqn, event); in mlx5e_cq_error_event()
cq.c:50 struct mlx5_core_cq *mcq; in mlx5_cq_tasklet_cb() local
57 list_for_each_entry_safe(mcq, temp, &ctx->process_list, in mlx5_cq_tasklet_cb()
59 list_del_init(&mcq->tasklet_ctx.list); in mlx5_cq_tasklet_cb()
60 mcq->tasklet_ctx.comp(mcq, NULL); in mlx5_cq_tasklet_cb()
61 mlx5_cq_put(mcq); in mlx5_cq_tasklet_cb()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/
en_dim.c:38 struct mlx5_core_dev *mdev, struct mlx5_core_cq *mcq) in mlx5e_complete_dim_work()
40 mlx5_core_modify_cq_moderation(mdev, mcq, moder.usec, moder.pkts); in mlx5e_complete_dim_work()
51 mlx5e_complete_dim_work(dim, cur_moder, rq->mdev, &rq->cq.mcq); in mlx5e_rx_dim_work()
61 mlx5e_complete_dim_work(dim, cur_moder, sq->cq.mdev, &sq->cq.mcq); in mlx5e_tx_dim_work()
37 mlx5e_complete_dim_work(struct dim *dim, struct dim_cq_moder moder, struct mlx5_core_dev *mdev, struct mlx5_core_cq *mcq) mlx5e_complete_dim_work() argument
en_txrx.c:258 void mlx5e_completion_event(struct mlx5_core_cq *mcq, struct mlx5_eqe *eqe) in mlx5e_completion_event() argument
260 struct mlx5e_cq *cq = container_of(mcq, struct mlx5e_cq, mcq); in mlx5e_completion_event()
267 void mlx5e_cq_error_event(struct mlx5_core_cq *mcq, enum mlx5_event event) in mlx5e_cq_error_event() argument
269 struct mlx5e_cq *cq = container_of(mcq, struct mlx5e_cq, mcq); in mlx5e_cq_error_event()
273 __func__, mcq->cqn, event); in mlx5e_cq_error_event()
cq.c:49 struct mlx5_core_cq *mcq; in mlx5_cq_tasklet_cb() local
56 list_for_each_entry_safe(mcq, temp, &ctx->process_list, in mlx5_cq_tasklet_cb()
58 list_del_init(&mcq->tasklet_ctx.list); in mlx5_cq_tasklet_cb()
59 mcq->tasklet_ctx.comp(mcq, NULL); in mlx5_cq_tasklet_cb()
60 mlx5_cq_put(mcq); in mlx5_cq_tasklet_cb()
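
Note on the mlx5 core cq.c hits: mlx5_cq_tasklet_cb() drains a per-EQ process_list with the _safe list iterator, unlinks each CQ, runs its comp() callback, and drops the reference taken when the CQ was queued (mlx4_cq_tasklet_cb() does the same with refcount_dec_and_test()/complete()). A much-simplified user-space sketch of that drain loop, with a plain singly linked list and a bare counter standing in for list_head and refcount_t:

    #include <stdio.h>

    struct toy_cq {
        int cqn;
        int refcount;                  /* stands in for refcount_t */
        void (*comp)(struct toy_cq *); /* completion callback (rx/tx irq) */
        struct toy_cq *next;           /* stands in for tasklet_ctx.list */
    };

    static void toy_comp(struct toy_cq *cq)
    {
        printf("completing cqn=%d\n", cq->cqn);
    }

    /* Drain the process list: unlink, run the callback, drop the queue ref. */
    static void toy_tasklet_cb(struct toy_cq **process_list)
    {
        struct toy_cq *cq, *temp;

        for (cq = *process_list; cq; cq = temp) {
            temp = cq->next;           /* "_safe": remember next before unlinking */
            cq->next = NULL;
            cq->comp(cq);
            if (--cq->refcount == 0)   /* last ref: kernel would complete(&cq->free) */
                printf("cqn=%d may now be freed\n", cq->cqn);
        }
        *process_list = NULL;
    }

    int main(void)
    {
        struct toy_cq a = { .cqn = 1, .refcount = 1, .comp = toy_comp };
        struct toy_cq b = { .cqn = 2, .refcount = 2, .comp = toy_comp, .next = &a };
        struct toy_cq *list = &b;

        toy_tasklet_cb(&list);
        return 0;
    }
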
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
cq.c:48 static void mlx5_ib_cq_event(struct mlx5_core_cq *mcq, enum mlx5_event type) in mlx5_ib_cq_event() argument
50 struct mlx5_ib_cq *cq = container_of(mcq, struct mlx5_ib_cq, mcq); in mlx5_ib_cq_event()
57 type, mcq->cqn); in mlx5_ib_cq_event()
84 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in get_sw_cqe()
96 return get_sw_cqe(cq, cq->mcq.cons_index); in next_cqe_sw()
461 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in mlx5_poll_one()
463 ++cq->mcq.cons_index; in mlx5_poll_one()
520 "Requestor" : "Responder", cq->mcq.cqn); in mlx5_poll_one()
557 cq->mcq in mlx5_poll_one()
1027 struct mlx5_ib_cq *mcq = to_mcq(cq); mlx5_ib_destroy_cq() local
1110 struct mlx5_ib_cq *mcq = to_mcq(cq); mlx5_ib_modify_cq() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
cq.c:48 static void mlx5_ib_cq_event(struct mlx5_core_cq *mcq, enum mlx5_event type) in mlx5_ib_cq_event() argument
50 struct mlx5_ib_cq *cq = container_of(mcq, struct mlx5_ib_cq, mcq); in mlx5_ib_cq_event()
57 type, mcq->cqn); in mlx5_ib_cq_event()
84 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in get_sw_cqe()
96 return get_sw_cqe(cq, cq->mcq.cons_index); in next_cqe_sw()
467 cqe64 = (cq->mcq.cqe_sz == 64) ? cqe : cqe + 64; in mlx5_poll_one()
469 ++cq->mcq.cons_index; in mlx5_poll_one()
526 "Requestor" : "Responder", cq->mcq.cqn); in mlx5_poll_one()
567 cq->mcq in mlx5_poll_one()
1052 struct mlx5_ib_cq *mcq = to_mcq(cq); mlx5_ib_destroy_cq() local
1135 struct mlx5_ib_cq *mcq = to_mcq(cq); mlx5_ib_modify_cq() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/lib/
aso.c:15 struct mlx5_core_cq mcq; member
48 struct mlx5_core_cq *mcq = &cq->mcq; in mlx5_aso_alloc_cq() local
60 mcq->cqe_sz = 64; in mlx5_aso_alloc_cq()
61 mcq->set_ci_db = cq->wq_ctrl.db.db; in mlx5_aso_alloc_cq()
62 mcq->arm_db = cq->wq_ctrl.db.db + 1; in mlx5_aso_alloc_cq()
79 struct mlx5_core_cq *mcq = &cq->mcq; in create_aso_cq() local
108 err = mlx5_core_create_cq(mdev, mcq, in, inlen, out, sizeof(out)); in create_aso_cq()
117 mlx5_core_destroy_cq(cq->mdev, &cq->mcq); in mlx5_aso_destroy_cq()
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c:361 mlx5_cq_arm(&conn->cq.mcq, MLX5_CQ_DB_REQ_NOT, in mlx5_fpga_conn_arm_cq()
400 static void mlx5_fpga_conn_cq_complete(struct mlx5_core_cq *mcq, in mlx5_fpga_conn_cq_complete() argument
405 conn = container_of(mcq, struct mlx5_fpga_conn, cq.mcq); in mlx5_fpga_conn_cq_complete()
465 err = mlx5_core_create_cq(mdev, &conn->cq.mcq, in, inlen, out, sizeof(out)); in mlx5_fpga_conn_create_cq()
471 conn->cq.mcq.cqe_sz = 64; in mlx5_fpga_conn_create_cq()
472 conn->cq.mcq.set_ci_db = conn->cq.wq_ctrl.db.db; in mlx5_fpga_conn_create_cq()
473 conn->cq.mcq.arm_db = conn->cq.wq_ctrl.db.db + 1; in mlx5_fpga_conn_create_cq()
474 *conn->cq.mcq.set_ci_db = 0; in mlx5_fpga_conn_create_cq()
475 *conn->cq.mcq in mlx5_fpga_conn_create_cq()
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/fpga/
conn.c:361 mlx5_cq_arm(&conn->cq.mcq, MLX5_CQ_DB_REQ_NOT, in mlx5_fpga_conn_arm_cq()
400 static void mlx5_fpga_conn_cq_complete(struct mlx5_core_cq *mcq, in mlx5_fpga_conn_cq_complete() argument
405 conn = container_of(mcq, struct mlx5_fpga_conn, cq.mcq); in mlx5_fpga_conn_cq_complete()
465 err = mlx5_core_create_cq(mdev, &conn->cq.mcq, in, inlen, out, sizeof(out)); in mlx5_fpga_conn_create_cq()
471 conn->cq.mcq.cqe_sz = 64; in mlx5_fpga_conn_create_cq()
472 conn->cq.mcq.set_ci_db = conn->cq.wq_ctrl.db.db; in mlx5_fpga_conn_create_cq()
473 conn->cq.mcq.arm_db = conn->cq.wq_ctrl.db.db + 1; in mlx5_fpga_conn_create_cq()
474 *conn->cq.mcq.set_ci_db = 0; in mlx5_fpga_conn_create_cq()
475 *conn->cq.mcq in mlx5_fpga_conn_create_cq()
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h:44 void mlx5e_completion_event(struct mlx5_core_cq *mcq, struct mlx5_eqe *eqe);
45 void mlx5e_cq_error_event(struct mlx5_core_cq *mcq, enum mlx5_event event);
230 struct mlx5_core_cq *mcq; in mlx5e_cq_arm() local
232 mcq = &cq->mcq; in mlx5e_cq_arm()
233 mlx5_cq_arm(mcq, MLX5_CQ_DB_REQ_NOT, mcq->uar->map, cq->wq.cc); in mlx5e_cq_arm()
313 cq->mcq.cqn, ci, qn, in mlx5e_dump_error_cqe()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h:62 void mlx5e_completion_event(struct mlx5_core_cq *mcq, struct mlx5_eqe *eqe);
63 void mlx5e_cq_error_event(struct mlx5_core_cq *mcq, enum mlx5_event event);
278 struct mlx5_core_cq *mcq; in mlx5e_cq_arm() local
280 mcq = &cq->mcq; in mlx5e_cq_arm()
281 mlx5_cq_arm(mcq, MLX5_CQ_DB_REQ_NOT, mcq->uar->map, cq->wq.cc); in mlx5e_cq_arm()
365 cq->mcq.cqn, ci, qn, in mlx5e_dump_error_cqe()
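
Note on the txrx.h hits: mlx5e_cq_arm() pulls the embedded mlx5_core_cq out of the mlx5e_cq and calls mlx5_cq_arm() with MLX5_CQ_DB_REQ_NOT, the UAR doorbell page, and the software consumer counter cq->wq.cc, so the device raises the next completion event only for CQEs beyond what software has already polled. A hypothetical sketch of that "arm with current consumer index" idea (not the real doorbell encoding):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdbool.h>

    struct toy_cq {
        uint32_t cc;        /* software consumer counter (like cq->wq.cc) */
        uint32_t pc;        /* "device" producer counter */
        uint32_t armed_ci;  /* consumer index recorded at arm time */
        bool     armed;
    };

    /* Arm: record how far software has consumed, then wait for newer CQEs. */
    static void toy_cq_arm(struct toy_cq *cq)
    {
        cq->armed_ci = cq->cc;
        cq->armed = true;
    }

    /* Device side: fire an event only if CQEs were posted past the armed index. */
    static bool toy_cq_should_fire(const struct toy_cq *cq)
    {
        return cq->armed && (int32_t)(cq->pc - cq->armed_ci) > 0;
    }

    int main(void)
    {
        struct toy_cq cq = { .cc = 5, .pc = 5 };

        toy_cq_arm(&cq);
        printf("event with no new CQEs: %d\n", toy_cq_should_fire(&cq));
        cq.pc++;            /* device posts one more CQE */
        printf("event after a new CQE: %d\n", toy_cq_should_fire(&cq));
        return 0;
    }
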
/kernel/linux/linux-6.6/drivers/vfio/pci/mlx5/
cmd.c:890 mlx5_core_destroy_cq(mdev, &cq->mcq); in mlx5vf_destroy_cq()
895 static void mlx5vf_cq_event(struct mlx5_core_cq *mcq, enum mlx5_event type) in mlx5vf_cq_event() argument
900 set_tracker_error(container_of(mcq, struct mlx5vf_pci_core_device, in mlx5vf_cq_event()
901 tracker.cq.mcq)); in mlx5vf_cq_event()
936 static void mlx5vf_cq_complete(struct mlx5_core_cq *mcq, in mlx5vf_cq_complete() argument
940 container_of(mcq, struct mlx5vf_pci_core_device, in mlx5vf_cq_complete()
941 tracker.cq.mcq); in mlx5vf_cq_complete()
965 cq->mcq.set_ci_db = cq->db.db; in mlx5vf_create_cq()
966 cq->mcq.arm_db = cq->db.db + 1; in mlx5vf_create_cq()
967 cq->mcq in mlx5vf_create_cq()
[all...]
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_send.c:699 static void dr_cq_complete(struct mlx5_core_cq *mcq, in dr_cq_complete() argument
702 pr_err("CQ completion CQ: #%u\n", mcq->cqn); in dr_cq_complete()
764 cq->mcq.comp = dr_cq_complete; in dr_create_cq()
766 err = mlx5_core_create_cq(mdev, &cq->mcq, in, inlen, out, sizeof(out)); in dr_create_cq()
772 cq->mcq.cqe_sz = 64; in dr_create_cq()
773 cq->mcq.set_ci_db = cq->wq_ctrl.db.db; in dr_create_cq()
774 cq->mcq.arm_db = cq->wq_ctrl.db.db + 1; in dr_create_cq()
775 *cq->mcq.set_ci_db = 0; in dr_create_cq()
780 *cq->mcq.arm_db = cpu_to_be32(2 << 28); in dr_create_cq()
782 cq->mcq in dr_create_cq()
[all...]
/kernel/linux/linux-5.10/drivers/vdpa/mlx5/net/
mlx5_vnet.c:54 struct mlx5_core_cq mcq; member
356 MLX5_SET(qpc, qpc, cqn_rcv, mvq->cq.mcq.cqn); in qp_prepare()
452 return get_sw_cqe(cq, cq->mcq.cons_index); in next_cqe_sw()
463 vcq->mcq.cons_index++; in mlx5_vdpa_poll_one()
469 mlx5_cq_set_ci(&mvq->cq.mcq); in mlx5_vdpa_handle_completions()
480 static void mlx5_vdpa_cq_comp(struct mlx5_core_cq *mcq, struct mlx5_eqe *eqe) in mlx5_vdpa_cq_comp() argument
482 struct mlx5_vdpa_virtqueue *mvq = container_of(mcq, struct mlx5_vdpa_virtqueue, cq.mcq); in mlx5_vdpa_cq_comp()
504 mlx5_cq_arm(&mvq->cq.mcq, MLX5_CQ_DB_REQ_NOT, uar_page, mvq->cq.mcq in mlx5_vdpa_cq_comp()
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_send.c:1052 static void dr_cq_complete(struct mlx5_core_cq *mcq, in dr_cq_complete() argument
1055 pr_err("CQ completion CQ: #%u\n", mcq->cqn); in dr_cq_complete()
1117 cq->mcq.comp = dr_cq_complete; in dr_create_cq()
1119 err = mlx5_core_create_cq(mdev, &cq->mcq, in, inlen, out, sizeof(out)); in dr_create_cq()
1125 cq->mcq.cqe_sz = 64; in dr_create_cq()
1126 cq->mcq.set_ci_db = cq->wq_ctrl.db.db; in dr_create_cq()
1127 cq->mcq.arm_db = cq->wq_ctrl.db.db + 1; in dr_create_cq()
1128 *cq->mcq.set_ci_db = 0; in dr_create_cq()
1133 *cq->mcq.arm_db = cpu_to_be32(2 << 28); in dr_create_cq()
1135 cq->mcq in dr_create_cq()
[all...]
