
Searched refs:wqe_index (Results 1 - 25 of 39) sorted by relevance


/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/
rxe_req.c
41 unsigned int wqe_index; in req_retry() local
52 qp->req.wqe_index = cons; in req_retry()
56 for (wqe_index = cons; wqe_index != prod; in req_retry()
57 wqe_index = queue_next_index(q, wqe_index)) { in req_retry()
58 wqe = queue_addr_from_index(qp->sq.queue, wqe_index); in req_retry()
127 index = qp->req.wqe_index; in req_check_sq_drain_done()
165 unsigned int index = qp->req.wqe_index; in __req_next_wqe()
213 return qp->req.wqe_index ! in rxe_wqe_is_fenced()
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/
rxe_req.c
44 unsigned int wqe_index; in req_retry() local
49 qp->req.wqe_index = consumer_index(qp->sq.queue); in req_retry()
53 for (wqe_index = consumer_index(qp->sq.queue); in req_retry()
54 wqe_index != producer_index(qp->sq.queue); in req_retry()
55 wqe_index = next_index(qp->sq.queue, wqe_index)) { in req_retry()
56 wqe = addr_from_index(qp->sq.queue, wqe_index); in req_retry()
123 if (wqe && ((qp->req.wqe_index != in req_next_wqe()
147 if (qp->req.wqe_index == producer_index(qp->sq.queue)) in req_next_wqe()
150 wqe = addr_from_index(qp->sq.queue, qp->req.wqe_index); in req_next_wqe()
[all...]
rxe_verbs.h
117 int wqe_index; member
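
The rxe hits above all revolve around one pattern: req_retry() walks the send queue from the consumer index to the producer index, advancing wqe_index one slot at a time with a wrap-around helper. Below is a minimal userspace sketch of that traversal; struct ring, its fields, and ring_next_index() are illustrative stand-ins, not the kernel's rxe_queue types, and a power-of-two slot count is assumed.

    /* Illustrative stand-ins for the rxe queue bookkeeping; not kernel types. */
    struct wqe { int opcode; };
    struct ring {
        struct wqe *slots;
        unsigned int num_slots;   /* assumed power of two */
        unsigned int cons;        /* consumer index */
        unsigned int prod;        /* producer index */
    };

    static unsigned int ring_next_index(const struct ring *q, unsigned int i)
    {
        return (i + 1) & (q->num_slots - 1);   /* wrap, like queue_next_index() */
    }

    /* Visit every posted-but-unretired WQE, the way req_retry() rescans them. */
    static void ring_rescan(struct ring *q, void (*visit)(struct wqe *))
    {
        unsigned int wqe_index;

        for (wqe_index = q->cons; wqe_index != q->prod;
             wqe_index = ring_next_index(q, wqe_index))
            visit(&q->slots[wqe_index]);
    }
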
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/
mthca_cq.c
373 struct mthca_qp *qp, int wqe_index, int is_send, in handle_error_cqe()
461 mthca_free_err_wqe(dev, qp, is_send, wqe_index, &dbd, &new_wqe); in handle_error_cqe()
486 int wqe_index; in mthca_poll_one() local
535 wqe_index = ((be32_to_cpu(cqe->wqe) - (*cur_qp)->send_wqe_offset) in mthca_poll_one()
537 entry->wr_id = (*cur_qp)->wrid[wqe_index + in mthca_poll_one()
543 wqe_index = wqe >> srq->wqe_shift; in mthca_poll_one()
544 entry->wr_id = srq->wrid[wqe_index]; in mthca_poll_one()
550 wqe_index = wqe >> wq->wqe_shift; in mthca_poll_one()
556 if (unlikely(wqe_index < 0)) in mthca_poll_one()
557 wqe_index in mthca_poll_one()
372 handle_error_cqe(struct mthca_dev *dev, struct mthca_cq *cq, struct mthca_qp *qp, int wqe_index, int is_send, struct mthca_err_cqe *cqe, struct ib_wc *entry, int *free_cqe) handle_error_cqe() argument
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mthca/
mthca_cq.c
373 struct mthca_qp *qp, int wqe_index, int is_send, in handle_error_cqe()
461 mthca_free_err_wqe(dev, qp, is_send, wqe_index, &dbd, &new_wqe); in handle_error_cqe()
486 int wqe_index; in mthca_poll_one() local
535 wqe_index = ((be32_to_cpu(cqe->wqe) - (*cur_qp)->send_wqe_offset) in mthca_poll_one()
537 entry->wr_id = (*cur_qp)->wrid[wqe_index + in mthca_poll_one()
543 wqe_index = wqe >> srq->wqe_shift; in mthca_poll_one()
544 entry->wr_id = srq->wrid[wqe_index]; in mthca_poll_one()
550 wqe_index = wqe >> wq->wqe_shift; in mthca_poll_one()
556 if (unlikely(wqe_index < 0)) in mthca_poll_one()
557 wqe_index in mthca_poll_one()
372 handle_error_cqe(struct mthca_dev *dev, struct mthca_cq *cq, struct mthca_qp *qp, int wqe_index, int is_send, struct mthca_err_cqe *cqe, struct ib_wc *entry, int *free_cqe) handle_error_cqe() argument
[all...]
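
In mthca_poll_one() the CQE reports the completed WQE as a big-endian byte offset, and the driver recovers wqe_index by subtracting the queue's base offset and shifting by the WQE stride (wqe_shift). A small sketch of that arithmetic, assuming a hypothetical CQE layout and using ntohl() as the userspace stand-in for be32_to_cpu():

    #include <stdint.h>
    #include <arpa/inet.h>   /* ntohl(): userspace analogue of be32_to_cpu() */

    /* Hypothetical CQE layout for illustration only; not the mthca format. */
    struct cqe_example { uint32_t wqe; /* big-endian byte offset of the WQE */ };

    /*
     * Recover a slot index from a byte offset, as mthca_poll_one() does for
     * send completions: subtract the queue's base offset, then shift by
     * log2(WQE stride).
     */
    static int wqe_index_from_cqe(const struct cqe_example *cqe,
                                  uint32_t send_wqe_offset, unsigned int wqe_shift)
    {
        return (int)((ntohl(cqe->wqe) - send_wqe_offset) >> wqe_shift);
    }
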
/kernel/linux/linux-5.10/include/linux/mlx4/
cq.h
57 __be16 wqe_index; member
66 __be16 wqe_index; member
82 __be16 wqe_index; member
/kernel/linux/linux-6.6/include/linux/mlx4/
cq.h
57 __be16 wqe_index; member
66 __be16 wqe_index; member
82 __be16 wqe_index; member
/kernel/linux/linux-5.10/drivers/net/ethernet/ibm/ehea/
ehea_qmr.h
308 int *wqe_index) in ehea_get_swqe()
313 *wqe_index = (queue->current_q_offset) >> (7 + EHEA_SG_SQ); in ehea_get_swqe()
325 static inline struct ehea_cqe *ehea_poll_rq1(struct ehea_qp *qp, int *wqe_index) in ehea_poll_rq1() argument
329 *wqe_index = (queue->current_q_offset) >> (7 + EHEA_SG_RQ1); in ehea_poll_rq1()
307 ehea_get_swqe(struct ehea_qp *my_qp, int *wqe_index) ehea_get_swqe() argument
ehea_main.c
589 int arr_len, int wqe_index) in get_skb_by_index_ll()
595 x = wqe_index + 1; in get_skb_by_index_ll()
608 skb = skb_array[wqe_index]; in get_skb_by_index_ll()
609 skb_array[wqe_index] = NULL; in get_skb_by_index_ll()
665 int wqe_index, last_wqe_index, rq, port_reset; in ehea_proc_rwqes() local
670 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
678 last_wqe_index = wqe_index; in ehea_proc_rwqes()
685 wqe_index); in ehea_proc_rwqes()
737 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
880 int wqe_index; in ehea_poll() local
588 get_skb_by_index_ll(struct sk_buff **skb_array, int arr_len, int wqe_index) get_skb_by_index_ll() argument
2476 int wqe_index; ehea_purge_sq() local
[all...]
/kernel/linux/linux-6.6/drivers/net/ethernet/ibm/ehea/
ehea_qmr.h
308 int *wqe_index) in ehea_get_swqe()
313 *wqe_index = (queue->current_q_offset) >> (7 + EHEA_SG_SQ); in ehea_get_swqe()
325 static inline struct ehea_cqe *ehea_poll_rq1(struct ehea_qp *qp, int *wqe_index) in ehea_poll_rq1() argument
329 *wqe_index = (queue->current_q_offset) >> (7 + EHEA_SG_RQ1); in ehea_poll_rq1()
307 ehea_get_swqe(struct ehea_qp *my_qp, int *wqe_index) ehea_get_swqe() argument
ehea_main.c
593 int arr_len, int wqe_index) in get_skb_by_index_ll()
599 x = wqe_index + 1; in get_skb_by_index_ll()
612 skb = skb_array[wqe_index]; in get_skb_by_index_ll()
613 skb_array[wqe_index] = NULL; in get_skb_by_index_ll()
669 int wqe_index, last_wqe_index, rq, port_reset; in ehea_proc_rwqes() local
674 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
682 last_wqe_index = wqe_index; in ehea_proc_rwqes()
689 wqe_index); in ehea_proc_rwqes()
741 cqe = ehea_poll_rq1(qp, &wqe_index); in ehea_proc_rwqes()
884 int wqe_index; in ehea_poll() local
592 get_skb_by_index_ll(struct sk_buff **skb_array, int arr_len, int wqe_index) get_skb_by_index_ll() argument
2480 int wqe_index; ehea_purge_sq() local
[all...]
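
The ehea helpers derive the index directly from the queue's running byte offset: the shift by (7 + EHEA_SG_SQ) or (7 + EHEA_SG_RQ1) implies a WQE size of 1 << (7 + SG) bytes, i.e. 128 bytes scaled by the scatter-gather factor. A one-line sketch of the same arithmetic; BASE_WQE_SHIFT and sg_factor are illustrative names, not ehea definitions:

    #include <stdint.h>

    #define BASE_WQE_SHIFT 7   /* log2(128) */

    /* Index of the WQE that starts at the queue's current byte offset. */
    static int wqe_index_from_offset(uint64_t current_q_offset, unsigned int sg_factor)
    {
        return (int)(current_q_offset >> (BASE_WQE_SHIFT + sg_factor));
    }
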
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
odp.c
62 u16 wqe_index; member
1132 u16 wqe_index = pfault->wqe.wqe_index; in mlx5_ib_mr_initiator_pfault_handler() local
1145 mlx5_ib_err(dev, "Got WQE with zero DS. wqe_index=%x, qpn=%x\n", in mlx5_ib_mr_initiator_pfault_handler()
1146 wqe_index, qpn); in mlx5_ib_mr_initiator_pfault_handler()
1270 u16 wqe_index = pfault->wqe.wqe_index; in mlx5_ib_mr_wqe_pfault_handler() local
1301 ret = mlx5_ib_read_wqe_sq(qp, wqe_index, wqe, PAGE_SIZE, in mlx5_ib_mr_wqe_pfault_handler()
1308 ret = mlx5_ib_read_wqe_rq(qp, wqe_index, wqe, PAGE_SIZE, in mlx5_ib_mr_wqe_pfault_handler()
1317 ret = mlx5_ib_read_wqe_srq(srq, wqe_index, wq in mlx5_ib_mr_wqe_pfault_handler()
[all...]
srq.c
409 void mlx5_ib_free_srq_wqe(struct mlx5_ib_srq *srq, int wqe_index) in mlx5_ib_free_srq_wqe() argument
417 next->next_wqe_index = cpu_to_be16(wqe_index); in mlx5_ib_free_srq_wqe()
418 srq->tail = wqe_index; in mlx5_ib_free_srq_wqe()
mlx5_ib.h
1132 void mlx5_ib_free_srq_wqe(struct mlx5_ib_srq *srq, int wqe_index);
1160 int mlx5_ib_read_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer,
1162 int mlx5_ib_read_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer,
1164 int mlx5_ib_read_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer,
qp.c
94 * @wqe_index: index of WQE to copy from
107 size_t buflen, int wqe_index, in mlx5_ib_read_user_wqe_common()
112 size_t offset = wq_offset + ((wqe_index % wq_wqe_cnt) << wq_wqe_shift); in mlx5_ib_read_user_wqe_common()
133 static int mlx5_ib_read_kernel_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, in mlx5_ib_read_kernel_wqe_sq() argument
142 wqe_index = wqe_index & qp->sq.fbc.sz_m1; in mlx5_ib_read_kernel_wqe_sq()
145 p = mlx5_frag_buf_get_wqe(&qp->sq.fbc, wqe_index); in mlx5_ib_read_kernel_wqe_sq()
161 wqe_index = (wqe_index + 1) & qp->sq.fbc.sz_m1; in mlx5_ib_read_kernel_wqe_sq()
162 p = mlx5_frag_buf_get_wqe(&qp->sq.fbc, wqe_index); in mlx5_ib_read_kernel_wqe_sq()
106 mlx5_ib_read_user_wqe_common(struct ib_umem *umem, void *buffer, size_t buflen, int wqe_index, int wq_offset, int wq_wqe_cnt, int wq_wqe_shift, int bcnt, size_t *bytes_copied) mlx5_ib_read_user_wqe_common() argument
168 mlx5_ib_read_user_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_sq() argument
219 mlx5_ib_read_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_sq() argument
235 mlx5_ib_read_user_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_rq() argument
255 mlx5_ib_read_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_rq() argument
272 mlx5_ib_read_user_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_srq() argument
289 mlx5_ib_read_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_srq() argument
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
odp.c
64 u16 wqe_index; member
1059 u16 wqe_index = pfault->wqe.wqe_index; in mlx5_ib_mr_initiator_pfault_handler() local
1072 mlx5_ib_err(dev, "Got WQE with zero DS. wqe_index=%x, qpn=%x\n", in mlx5_ib_mr_initiator_pfault_handler()
1073 wqe_index, qpn); in mlx5_ib_mr_initiator_pfault_handler()
1197 u16 wqe_index = pfault->wqe.wqe_index; in mlx5_ib_mr_wqe_pfault_handler() local
1228 ret = mlx5_ib_read_wqe_sq(qp, wqe_index, wqe, PAGE_SIZE, in mlx5_ib_mr_wqe_pfault_handler()
1235 ret = mlx5_ib_read_wqe_rq(qp, wqe_index, wqe, PAGE_SIZE, in mlx5_ib_mr_wqe_pfault_handler()
1244 ret = mlx5_ib_read_wqe_srq(srq, wqe_index, wq in mlx5_ib_mr_wqe_pfault_handler()
[all...]
srq.c
388 void mlx5_ib_free_srq_wqe(struct mlx5_ib_srq *srq, int wqe_index) in mlx5_ib_free_srq_wqe() argument
396 next->next_wqe_index = cpu_to_be16(wqe_index); in mlx5_ib_free_srq_wqe()
397 srq->tail = wqe_index; in mlx5_ib_free_srq_wqe()
mlx5_ib.h
1265 void mlx5_ib_free_srq_wqe(struct mlx5_ib_srq *srq, int wqe_index);
1292 int mlx5_ib_read_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer,
1294 int mlx5_ib_read_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer,
1296 int mlx5_ib_read_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer,
qp.c
107 * @wqe_index: index of WQE to copy from
120 size_t buflen, int wqe_index, in mlx5_ib_read_user_wqe_common()
125 size_t offset = wq_offset + ((wqe_index % wq_wqe_cnt) << wq_wqe_shift); in mlx5_ib_read_user_wqe_common()
146 static int mlx5_ib_read_kernel_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, in mlx5_ib_read_kernel_wqe_sq() argument
155 wqe_index = wqe_index & qp->sq.fbc.sz_m1; in mlx5_ib_read_kernel_wqe_sq()
158 p = mlx5_frag_buf_get_wqe(&qp->sq.fbc, wqe_index); in mlx5_ib_read_kernel_wqe_sq()
174 wqe_index = (wqe_index + 1) & qp->sq.fbc.sz_m1; in mlx5_ib_read_kernel_wqe_sq()
175 p = mlx5_frag_buf_get_wqe(&qp->sq.fbc, wqe_index); in mlx5_ib_read_kernel_wqe_sq()
119 mlx5_ib_read_user_wqe_common(struct ib_umem *umem, void *buffer, size_t buflen, int wqe_index, int wq_offset, int wq_wqe_cnt, int wq_wqe_shift, int bcnt, size_t *bytes_copied) mlx5_ib_read_user_wqe_common() argument
181 mlx5_ib_read_user_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_sq() argument
232 mlx5_ib_read_wqe_sq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_sq() argument
248 mlx5_ib_read_user_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_rq() argument
268 mlx5_ib_read_wqe_rq(struct mlx5_ib_qp *qp, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_rq() argument
285 mlx5_ib_read_user_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_user_wqe_srq() argument
302 mlx5_ib_read_wqe_srq(struct mlx5_ib_srq *srq, int wqe_index, void *buffer, size_t buflen, size_t *bc) mlx5_ib_read_wqe_srq() argument
[all...]
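
mlx5_ib_read_user_wqe_common() locates a WQE inside the mapped queue with offset = wq_offset + ((wqe_index % wq_wqe_cnt) << wq_wqe_shift), and the SQ read path wraps back to the start of the queue when a large, multi-slot WQE runs past the end. The sketch below reproduces only that offset-and-wrap arithmetic in userspace; struct wq_desc and read_wqe() are illustrative names, not the driver's API, and the caller is assumed to pass bcnt no larger than the queue size.

    #include <stdint.h>
    #include <string.h>

    /* Illustrative queue description; not the mlx5_ib structures. */
    struct wq_desc {
        const uint8_t *buf;       /* start of the mapped buffer */
        size_t wq_offset;         /* byte offset of the work queue inside buf */
        unsigned int wqe_cnt;     /* number of WQE slots */
        unsigned int wqe_shift;   /* log2(WQE stride in bytes) */
    };

    /*
     * Copy bcnt bytes starting at slot wqe_index, wrapping to the start of
     * the queue if the read runs past its end (large send WQEs span several
     * slots). Caller guarantees bcnt <= wqe_cnt << wqe_shift and that out
     * can hold bcnt bytes.
     */
    static void read_wqe(const struct wq_desc *wq, unsigned int wqe_index,
                         size_t bcnt, uint8_t *out)
    {
        size_t wq_size = (size_t)wq->wqe_cnt << wq->wqe_shift;
        size_t off = (size_t)(wqe_index % wq->wqe_cnt) << wq->wqe_shift;
        size_t first = bcnt < wq_size - off ? bcnt : wq_size - off;

        memcpy(out, wq->buf + wq->wq_offset + off, first);
        if (first < bcnt)   /* wrap around to slot 0 */
            memcpy(out + first, wq->buf + wq->wq_offset, bcnt - first);
    }
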
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
401 u16 wqe_index; in mlx4_en_handle_err_cqe() local
409 wqe_index = be16_to_cpu(err_cqe->wqe_index) & ring->size_mask; in mlx4_en_handle_err_cqe()
410 tx_info = &ring->tx_info[wqe_index]; in mlx4_en_handle_err_cqe()
413 wqe_index, desc_size); in mlx4_en_handle_err_cqe()
414 tx_desc = ring->buf + (wqe_index << LOG_TXBB_SIZE); in mlx4_en_handle_err_cqe()
476 new_index = be16_to_cpu(cqe->wqe_index) & size_mask; in mlx4_en_process_tx_cq()
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx4/
en_tx.c
406 u16 wqe_index; in mlx4_en_handle_err_cqe() local
414 wqe_index = be16_to_cpu(err_cqe->wqe_index) & ring->size_mask; in mlx4_en_handle_err_cqe()
415 tx_info = &ring->tx_info[wqe_index]; in mlx4_en_handle_err_cqe()
418 wqe_index, desc_size); in mlx4_en_handle_err_cqe()
419 tx_desc = ring->buf + (wqe_index << LOG_TXBB_SIZE); in mlx4_en_handle_err_cqe()
481 new_index = be16_to_cpu(cqe->wqe_index) & size_mask; in mlx4_en_process_tx_cq()
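
In the mlx4 Ethernet TX path the CQE's wqe_index is a free-running 16-bit counter stored big-endian; the ring slot is recovered by byte-swapping it and masking with ring->size_mask (ring size minus one). A minimal sketch, with a hypothetical CQE layout and ntohs() standing in for be16_to_cpu():

    #include <stdint.h>
    #include <arpa/inet.h>   /* ntohs(): userspace analogue of be16_to_cpu() */

    /* Hypothetical CQE with a big-endian 16-bit WQE counter; illustration only. */
    struct tx_cqe_example { uint16_t wqe_index; };

    /*
     * The counter keeps incrementing past the ring size, so the slot is the
     * counter masked by (ring size - 1), as mlx4_en_handle_err_cqe() and
     * mlx4_en_process_tx_cq() do with ring->size_mask.
     */
    static uint16_t cqe_to_ring_slot(const struct tx_cqe_example *cqe,
                                     uint16_t size_mask)
    {
        return ntohs(cqe->wqe_index) & size_mask;
    }
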
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
srq.c
289 void mlx4_ib_free_srq_wqe(struct mlx4_ib_srq *srq, int wqe_index) in mlx4_ib_free_srq_wqe() argument
297 next->next_wqe_index = cpu_to_be16(wqe_index); in mlx4_ib_free_srq_wqe()
298 srq->tail = wqe_index; in mlx4_ib_free_srq_wqe()
cq.c
517 be32_to_cpu(cqe->my_qpn), be16_to_cpu(cqe->wqe_index), in mlx4_ib_handle_error_cqe()
734 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
741 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
746 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
944 mlx4_ib_free_srq_wqe(srq, be16_to_cpu(cqe->wqe_index)); in __mlx4_ib_cq_clean()
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/
srq.c
293 void mlx4_ib_free_srq_wqe(struct mlx4_ib_srq *srq, int wqe_index) in mlx4_ib_free_srq_wqe() argument
301 next->next_wqe_index = cpu_to_be16(wqe_index); in mlx4_ib_free_srq_wqe()
302 srq->tail = wqe_index; in mlx4_ib_free_srq_wqe()
cq.c
517 be32_to_cpu(cqe->my_qpn), be16_to_cpu(cqe->wqe_index), in mlx4_ib_handle_error_cqe()
734 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
741 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
746 wqe_ctr = be16_to_cpu(cqe->wqe_index); in mlx4_ib_poll_one()
944 mlx4_ib_free_srq_wqe(srq, be16_to_cpu(cqe->wqe_index)); in __mlx4_ib_cq_clean()
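
Both mlx4_ib_free_srq_wqe() and its mlx5 counterpart return a slot to the SRQ by chaining it behind the current tail: the tail slot's next_wqe_index is pointed at the freed index (stored big-endian) and the freed index becomes the new tail. A compact sketch of that free-list update, with illustrative types and htons() in place of cpu_to_be16(); the spinlock the drivers take around this update is omitted:

    #include <stdint.h>
    #include <arpa/inet.h>   /* htons(): userspace analogue of cpu_to_be16() */

    /* Illustrative SRQ bookkeeping; not the mlx4/mlx5 driver structures. */
    struct srq_wqe_hdr { uint16_t next_wqe_index; /* big-endian in the WQE */ };
    struct srq_example {
        struct srq_wqe_hdr *wqe;   /* array of WQE headers, one per slot */
        int tail;                  /* index of the last free slot */
    };

    /*
     * Return a completed receive WQE to the SRQ free list: chain it after
     * the current tail and make it the new tail.
     */
    static void srq_free_wqe(struct srq_example *srq, int wqe_index)
    {
        srq->wqe[srq->tail].next_wqe_index = htons((uint16_t)wqe_index);
        srq->tail = wqe_index;
    }
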

Completed in 40 milliseconds
