
Searched refs:wqe_size (results 1 - 25 of 79), sorted by relevance


/kernel/linux/linux-5.10/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.h
182 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
186 unsigned int wqe_size, u16 *prod_idx);
188 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size);
192 unsigned int wqe_size);
196 unsigned int wqe_size, u16 *cons_idx);
200 unsigned int *wqe_size, u16 *cons_idx);
202 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
208 unsigned int wqe_size, u16 *prod_idx);
214 unsigned int wqe_size,
218 unsigned int wqe_size,
[all...]
hinic_hw_qp.c
636 * @wqe_size: wqe size
639 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db() argument
645 prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_sq_write_db()
656 * @wqe_size: wqe size
662 unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe()
664 struct hinic_hw_wqe *hw_wqe = hinic_get_wqe(sq->wq, wqe_size, in hinic_sq_get_wqe()
676 * @wqe_size: the size of the wqe
678 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_return_wqe() argument
680 hinic_return_wqe(sq->wq, wqe_size); in hinic_sq_return_wqe()
689 * @wqe_size
661 hinic_sq_get_wqe(struct hinic_sq *sq, unsigned int wqe_size, u16 *prod_idx) hinic_sq_get_wqe() argument
691 hinic_sq_write_wqe(struct hinic_sq *sq, u16 prod_idx, struct hinic_sq_wqe *sq_wqe, struct sk_buff *skb, unsigned int wqe_size) hinic_sq_write_wqe() argument
715 hinic_sq_read_wqebb(struct hinic_sq *sq, struct sk_buff **skb, unsigned int *wqe_size, u16 *cons_idx) hinic_sq_read_wqebb() argument
753 hinic_sq_read_wqe(struct hinic_sq *sq, struct sk_buff **skb, unsigned int wqe_size, u16 *cons_idx) hinic_sq_read_wqe() argument
770 hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size) hinic_sq_put_wqe() argument
800 hinic_rq_get_wqe(struct hinic_rq *rq, unsigned int wqe_size, u16 *prod_idx) hinic_rq_get_wqe() argument
841 hinic_rq_read_wqe(struct hinic_rq *rq, unsigned int wqe_size, struct sk_buff **skb, u16 *cons_idx) hinic_rq_read_wqe() argument
876 hinic_rq_read_next_wqe(struct hinic_rq *rq, unsigned int wqe_size, struct sk_buff **skb, u16 *cons_idx) hinic_rq_read_next_wqe() argument
903 hinic_rq_put_wqe(struct hinic_rq *rq, u16 cons_idx, unsigned int wqe_size) hinic_rq_put_wqe() argument
[all...]
hinic_tx.c
496 unsigned int wqe_size; in hinic_lb_xmit_frame() local
508 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_lb_xmit_frame()
510 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
514 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
526 wqe_size = 0; in hinic_lb_xmit_frame()
532 hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size); in hinic_lb_xmit_frame()
537 hinic_sq_write_db(txq->sq, prod_idx, wqe_size, 0); in hinic_lb_xmit_frame()
557 unsigned int wqe_size; in hinic_xmit_frame() local
589 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
591 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, in hinic_xmit_frame()
668 unsigned int wqe_size; free_all_tx_skbs() local
704 unsigned int wqe_size; free_tx_poll() local
[all...]
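
The hinic_tx.c hits show the transmit path sizing a send WQE from its scatter-gather entry count (wqe_size = HINIC_SQ_WQE_SIZE(nr_sges)) before reserving queue space. A minimal user-space sketch of that sizing step, assuming a WQE laid out as a fixed control-plus-task header followed by one buffer descriptor per SGE; the byte sizes are illustrative stand-ins, not the hardware's:

    /* Sketch, not the driver: a send WQE is a fixed header plus one
     * buffer descriptor per scatter-gather entry. The sizes stand in
     * for the hinic_sq_ctrl/task/bufdesc hardware structs. */
    #include <stdio.h>

    #define CTRL_SIZE    16  /* assumed stand-in for struct hinic_sq_ctrl */
    #define TASK_SIZE    32  /* assumed stand-in for struct hinic_sq_task */
    #define BUFDESC_SIZE 16  /* assumed stand-in for struct hinic_sq_bufdesc */

    static unsigned int sq_wqe_size(unsigned int nr_sges)
    {
            return CTRL_SIZE + TASK_SIZE + nr_sges * BUFDESC_SIZE;
    }

    int main(void)
    {
            for (unsigned int sges = 1; sges <= 4; sges++)
                    printf("%u sges -> wqe_size %u bytes\n",
                           sges, sq_wqe_size(sges));
            return 0;
    }
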
hinic_hw_wq.h
96 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
99 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size);
101 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
103 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
109 unsigned int wqe_size);
hinic_hw_wq.c
736 * @wqe_size: wqe size
741 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
749 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift; in hinic_get_wqe()
788 * @wqe_size: wqe size
790 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe() argument
792 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_return_wqe()
802 * @wqe_size: wqe size
804 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
806 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_put_wqe()
817 * @wqe_size
822 hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, u16 *cons_idx) hinic_read_wqe() argument
889 hinic_write_wqe(struct hinic_wq *wq, struct hinic_hw_wqe *wqe, unsigned int wqe_size) hinic_write_wqe() argument
[all...]
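
Several of the hinic_hw_wq.c hits convert a byte-granular wqe_size into a count of work-queue basic blocks (WQEBBs), rounding up with ALIGN() and then either dividing or right-shifting. A small sketch showing the two forms agree, assuming a power-of-two wqebb_size of 64 (shift 6):

    /* Sketch: round a WQE byte size up to whole WQEBBs, as the hinic
     * snippets above do. wqebb_size is assumed to be a power of two,
     * so ALIGN()/divide and ALIGN()>>shift give the same count. */
    #include <assert.h>
    #include <stdio.h>

    #define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
            unsigned int wqebb_size = 64, wqebb_shift = 6;

            for (unsigned int wqe_size = 1; wqe_size <= 256; wqe_size += 37) {
                    unsigned int by_div   = ALIGN_UP(wqe_size, wqebb_size) / wqebb_size;
                    unsigned int by_shift = ALIGN_UP(wqe_size, wqebb_size) >> wqebb_shift;

                    assert(by_div == by_shift);
                    printf("wqe_size %3u -> %u wqebb(s)\n", wqe_size, by_div);
            }
            return 0;
    }
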
/kernel/linux/linux-6.6/drivers/net/ethernet/huawei/hinic/
hinic_hw_qp.h
181 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size,
185 unsigned int wqe_size, u16 *prod_idx);
187 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size);
191 unsigned int wqe_size);
195 unsigned int wqe_size, u16 *cons_idx);
199 unsigned int *wqe_size, u16 *cons_idx);
201 void hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size);
207 unsigned int wqe_size, u16 *prod_idx);
213 unsigned int wqe_size,
217 unsigned int wqe_size,
[all...]
hinic_hw_qp.c
632 * @wqe_size: wqe size
635 void hinic_sq_write_db(struct hinic_sq *sq, u16 prod_idx, unsigned int wqe_size, in hinic_sq_write_db() argument
641 prod_idx += ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_sq_write_db()
652 * @wqe_size: wqe size
658 unsigned int wqe_size, u16 *prod_idx) in hinic_sq_get_wqe()
660 struct hinic_hw_wqe *hw_wqe = hinic_get_wqe(sq->wq, wqe_size, in hinic_sq_get_wqe()
672 * @wqe_size: the size of the wqe
674 void hinic_sq_return_wqe(struct hinic_sq *sq, unsigned int wqe_size) in hinic_sq_return_wqe() argument
676 hinic_return_wqe(sq->wq, wqe_size); in hinic_sq_return_wqe()
685 * @wqe_size
657 hinic_sq_get_wqe(struct hinic_sq *sq, unsigned int wqe_size, u16 *prod_idx) hinic_sq_get_wqe() argument
687 hinic_sq_write_wqe(struct hinic_sq *sq, u16 prod_idx, struct hinic_sq_wqe *sq_wqe, struct sk_buff *skb, unsigned int wqe_size) hinic_sq_write_wqe() argument
711 hinic_sq_read_wqebb(struct hinic_sq *sq, struct sk_buff **skb, unsigned int *wqe_size, u16 *cons_idx) hinic_sq_read_wqebb() argument
749 hinic_sq_read_wqe(struct hinic_sq *sq, struct sk_buff **skb, unsigned int wqe_size, u16 *cons_idx) hinic_sq_read_wqe() argument
766 hinic_sq_put_wqe(struct hinic_sq *sq, unsigned int wqe_size) hinic_sq_put_wqe() argument
796 hinic_rq_get_wqe(struct hinic_rq *rq, unsigned int wqe_size, u16 *prod_idx) hinic_rq_get_wqe() argument
837 hinic_rq_read_wqe(struct hinic_rq *rq, unsigned int wqe_size, struct sk_buff **skb, u16 *cons_idx) hinic_rq_read_wqe() argument
872 hinic_rq_read_next_wqe(struct hinic_rq *rq, unsigned int wqe_size, struct sk_buff **skb, u16 *cons_idx) hinic_rq_read_next_wqe() argument
899 hinic_rq_put_wqe(struct hinic_rq *rq, u16 cons_idx, unsigned int wqe_size) hinic_rq_put_wqe() argument
[all...]
hinic_tx.c
498 unsigned int wqe_size; in hinic_lb_xmit_frame() local
510 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_lb_xmit_frame()
512 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
516 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, &prod_idx); in hinic_lb_xmit_frame()
528 wqe_size = 0; in hinic_lb_xmit_frame()
534 hinic_sq_write_wqe(txq->sq, prod_idx, sq_wqe, skb, wqe_size); in hinic_lb_xmit_frame()
539 hinic_sq_write_db(txq->sq, prod_idx, wqe_size, 0); in hinic_lb_xmit_frame()
559 unsigned int wqe_size; in hinic_xmit_frame() local
591 wqe_size = HINIC_SQ_WQE_SIZE(nr_sges); in hinic_xmit_frame()
593 sq_wqe = hinic_sq_get_wqe(txq->sq, wqe_size, in hinic_xmit_frame()
670 unsigned int wqe_size; free_all_tx_skbs() local
706 unsigned int wqe_size; free_tx_poll() local
[all...]
hinic_hw_wq.h
96 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size,
99 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size);
101 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size);
103 struct hinic_hw_wqe *hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size,
109 unsigned int wqe_size);
hinic_hw_wq.c
735 * @wqe_size: wqe size
740 struct hinic_hw_wqe *hinic_get_wqe(struct hinic_wq *wq, unsigned int wqe_size, in hinic_get_wqe() argument
748 num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) >> wq->wqebb_size_shift; in hinic_get_wqe()
787 * @wqe_size: wqe size
789 void hinic_return_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_return_wqe() argument
791 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) / wq->wqebb_size; in hinic_return_wqe()
801 * @wqe_size: wqe size
803 void hinic_put_wqe(struct hinic_wq *wq, unsigned int wqe_size) in hinic_put_wqe() argument
805 int num_wqebbs = ALIGN(wqe_size, wq->wqebb_size) in hinic_put_wqe()
816 * @wqe_size
821 hinic_read_wqe(struct hinic_wq *wq, unsigned int wqe_size, u16 *cons_idx) hinic_read_wqe() argument
888 hinic_write_wqe(struct hinic_wq *wq, struct hinic_hw_wqe *wqe, unsigned int wqe_size) hinic_write_wqe() argument
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/i40iw/
i40iw_uk.c
60 qp->sq_wrtrk_array[wqe_idx].wqe_size = I40IW_QP_WQE_MIN_SIZE; in i40iw_nop_1()
132 * @wqe_size: size of sq wqe
136 u8 wqe_size, in i40iw_qp_get_next_send_wqe()
155 if ((offset + wqe_size) > I40IW_QP_WQE_MAX_SIZE) { in i40iw_qp_get_next_send_wqe()
169 if (((*wqe_idx & 3) == 1) && (wqe_size == I40IW_WQE_SIZE_64)) { in i40iw_qp_get_next_send_wqe()
179 wqe_size / I40IW_QP_WQE_MIN_SIZE, ret_code); in i40iw_qp_get_next_send_wqe()
195 qp->sq_wrtrk_array[*wqe_idx].wqe_size = wqe_size; in i40iw_qp_get_next_send_wqe()
256 u8 wqe_size; in i40iw_rdma_write() local
270 ret_code = i40iw_fragcnt_to_wqesize_sq(op_info->num_lo_sges, &wqe_size); in i40iw_rdma_write()
134 i40iw_qp_get_next_send_wqe(struct i40iw_qp_uk *qp, u32 *wqe_idx, u8 wqe_size, u32 total_size, u64 wr_id ) i40iw_qp_get_next_send_wqe() argument
324 u8 wqe_size; i40iw_rdma_read() local
373 u8 wqe_size; i40iw_send() local
434 u8 wqe_size; i40iw_inline_rdma_write() local
509 u8 wqe_size; i40iw_inline_send() local
1160 i40iw_fragcnt_to_wqesize_sq(u32 frag_cnt, u8 *wqe_size) i40iw_fragcnt_to_wqesize_sq() argument
1191 i40iw_fragcnt_to_wqesize_rq(u32 frag_cnt, u8 *wqe_size) i40iw_fragcnt_to_wqesize_rq() argument
1220 i40iw_inline_data_size_to_wqesize(u32 data_size, u8 *wqe_size) i40iw_inline_data_size_to_wqesize() argument
[all...]
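
The i40iw hits convert a fragment count into one of a few fixed hardware WQE sizes via i40iw_fragcnt_to_wqesize_sq(). A hedged sketch of that bucketing; the cutoffs and the error convention below are assumptions for illustration, not the driver's actual table:

    /* Sketch with assumed thresholds: map a fragment count to a
     * fixed WQE size bucket, failing for counts the hardware cannot
     * encode in a single WQE. Bucket boundaries are illustrative. */
    #include <stdio.h>

    enum { WQE_32 = 32, WQE_64 = 64, WQE_96 = 96, WQE_128 = 128 };

    static int fragcnt_to_wqesize(unsigned int frag_cnt, unsigned char *wqe_size)
    {
            if (frag_cnt <= 2)        /* assumed cutoff */
                    *wqe_size = WQE_32;
            else if (frag_cnt <= 4)   /* assumed cutoff */
                    *wqe_size = WQE_64;
            else if (frag_cnt <= 6)   /* assumed cutoff */
                    *wqe_size = WQE_96;
            else if (frag_cnt <= 8)   /* assumed cutoff */
                    *wqe_size = WQE_128;
            else
                    return -1;        /* too many fragments for one WQE */
            return 0;
    }

    int main(void)
    {
            for (unsigned int n = 1; n <= 9; n++) {
                    unsigned char sz;

                    if (fragcnt_to_wqesize(n, &sz))
                            printf("frag_cnt %u -> error\n", n);
                    else
                            printf("frag_cnt %u -> wqe_size %u\n", n, sz);
            }
            return 0;
    }
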
i40iw_user.h
328 u8 wqe_size; member
408 u8 wqe_size,
423 enum i40iw_status_code i40iw_fragcnt_to_wqesize_sq(u32 frag_cnt, u8 *wqe_size);
424 enum i40iw_status_code i40iw_fragcnt_to_wqesize_rq(u32 frag_cnt, u8 *wqe_size);
426 u8 *wqe_size);
/kernel/linux/linux-5.10/drivers/infiniband/hw/qedr/
qedr_hsi_rdma.h
310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
720 u8 wqe_size; global() member
743 u8 wqe_size; global() member
[all...]
verbs.c
3290 struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data()
3327 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3375 static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges() argument
3390 if (wqe_size) in qedr_prepare_sq_sges()
3391 *wqe_size += wr->num_sge; in qedr_prepare_sq_sges()
3412 return qedr_prepare_sq_inline_data(dev, qp, &rwqe->wqe_size, wr, in qedr_prepare_sq_rdma_data()
3416 return qedr_prepare_sq_sges(qp, &rwqe->wqe_size, wr); in qedr_prepare_sq_rdma_data()
3431 return qedr_prepare_sq_inline_data(dev, qp, &swqe->wqe_size, wr, in qedr_prepare_sq_send_data()
3435 return qedr_prepare_sq_sges(qp, &swqe->wqe_size, wr); in qedr_prepare_sq_send_data()
3588 swqe->wqe_size in __qedr_post_send()
3289 qedr_prepare_sq_inline_data(struct qedr_dev *dev, struct qedr_qp *qp, u8 *wqe_size, const struct ib_send_wr *wr, const struct ib_send_wr **bad_wr, u8 *bits, u8 bit) qedr_prepare_sq_inline_data() argument
[all...]
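
Note that qedr tracks wqe_size as a count of fixed-size WQE elements rather than bytes: the SGE path above adds wr->num_sge elements, while the inline path increments once per data segment filled. A sketch of the SGE path, assuming 16-byte elements and simplified types (not the qedr structs):

    /* Sketch: size a send WQE in fixed elements, one header element
     * plus one per SGE, as the qedr SGE path does. The 16-byte
     * element size is an assumption for the demo. */
    #include <stdio.h>

    #define ELEM_BYTES 16  /* assumed size of one WQE element */

    struct sge { unsigned long addr; unsigned int length, lkey; };

    static unsigned int prepare_sges(unsigned char *wqe_size,
                                     const struct sge *sgl, int num_sge)
    {
            unsigned int data_len = 0;

            for (int i = 0; i < num_sge; i++)
                    data_len += sgl[i].length;   /* payload bytes described */

            if (wqe_size)
                    *wqe_size += num_sge;        /* one element per SGE */

            return data_len;
    }

    int main(void)
    {
            struct sge sgl[] = { { 0x1000, 256, 1 }, { 0x2000, 512, 2 } };
            unsigned char wqe_size = 1;          /* header element */

            unsigned int len = prepare_sges(&wqe_size, sgl, 2);
            printf("payload %u bytes, wqe_size %u elements (%u bytes)\n",
                   len, wqe_size, wqe_size * ELEM_BYTES);
            return 0;
    }
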
/kernel/linux/linux-6.6/drivers/infiniband/hw/qedr/
qedr_hsi_rdma.h
310 u8 wqe_size; member
338 u8 wqe_size; member
374 u8 wqe_size; member
420 u8 wqe_size; member
475 u8 wqe_size; member
498 u8 wqe_size; member
548 u8 wqe_size; member
602 u8 wqe_size; member
628 u8 wqe_size; member
663 u8 wqe_size; member
720 u8 wqe_size; global() member
743 u8 wqe_size; global() member
[all...]
verbs.c
3290 struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_inline_data()
3327 (*wqe_size)++; in qedr_prepare_sq_inline_data()
3375 static u32 qedr_prepare_sq_sges(struct qedr_qp *qp, u8 *wqe_size, in qedr_prepare_sq_sges() argument
3390 if (wqe_size) in qedr_prepare_sq_sges()
3391 *wqe_size += wr->num_sge; in qedr_prepare_sq_sges()
3412 return qedr_prepare_sq_inline_data(dev, qp, &rwqe->wqe_size, wr, in qedr_prepare_sq_rdma_data()
3416 return qedr_prepare_sq_sges(qp, &rwqe->wqe_size, wr); in qedr_prepare_sq_rdma_data()
3431 return qedr_prepare_sq_inline_data(dev, qp, &swqe->wqe_size, wr, in qedr_prepare_sq_send_data()
3435 return qedr_prepare_sq_sges(qp, &swqe->wqe_size, wr); in qedr_prepare_sq_send_data()
3588 swqe->wqe_size in __qedr_post_send()
3289 qedr_prepare_sq_inline_data(struct qedr_dev *dev, struct qedr_qp *qp, u8 *wqe_size, const struct ib_send_wr *wr, const struct ib_send_wr **bad_wr, u8 *bits, u8 bit) qedr_prepare_sq_inline_data() argument
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/
rxe_srq.c
49 int wqe_size; in rxe_srq_from_init() local
59 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_srq_from_init()
65 q = rxe_queue_init(rxe, &srq->rq.max_wr, wqe_size, in rxe_srq_from_init()
157 int wqe_size; in rxe_srq_from_attr() local
167 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_srq_from_attr()
170 err = rxe_queue_resize(q, &attr->max_wr, wqe_size, in rxe_srq_from_attr()
rxe_qp.c
191 int wqe_size; in rxe_init_sq() local
195 wqe_size = max_t(int, init->cap.max_send_sge * sizeof(struct ib_sge), in rxe_init_sq()
197 qp->sq.max_sge = wqe_size / sizeof(struct ib_sge); in rxe_init_sq()
198 qp->sq.max_inline = wqe_size; in rxe_init_sq()
199 wqe_size += sizeof(struct rxe_send_wqe); in rxe_init_sq()
201 qp->sq.queue = rxe_queue_init(rxe, &qp->sq.max_wr, wqe_size, in rxe_init_sq()
284 int wqe_size; in rxe_init_rq() local
289 wqe_size = sizeof(struct rxe_recv_wqe) + in rxe_init_rq()
292 qp->rq.queue = rxe_queue_init(rxe, &qp->rq.max_wr, wqe_size, in rxe_init_rq()
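
The rxe_qp.c hits show how the software RoCE driver sizes its send queue: the variable part of each WQE must hold either the SGE array or the inline payload, whichever is larger, and the advertised caps are derived back from that shared buffer before the fixed header is added. A sketch with stand-in struct sizes (not the kernel's):

    /* Sketch of the rxe send-queue sizing above. SGE_BYTES and
     * HDR_BYTES stand in for sizeof(struct ib_sge) and
     * sizeof(struct rxe_send_wqe). */
    #include <stdio.h>

    #define SGE_BYTES 16   /* stand-in for sizeof(struct ib_sge) */
    #define HDR_BYTES 64   /* stand-in for sizeof(struct rxe_send_wqe) */
    #define MAX(a, b) ((a) > (b) ? (a) : (b))

    int main(void)
    {
            unsigned int max_send_sge = 4, max_inline_data = 96;

            unsigned int wqe_size = MAX(max_send_sge * SGE_BYTES,
                                        max_inline_data);
            unsigned int max_sge    = wqe_size / SGE_BYTES; /* may exceed request */
            unsigned int max_inline = wqe_size;             /* may exceed request */

            wqe_size += HDR_BYTES;                          /* fixed header */

            printf("wqe_size %u, max_sge %u, max_inline %u\n",
                   wqe_size, max_sge, max_inline);
            return 0;
    }

Because one buffer serves both uses, requesting a large max_inline_data raises the advertised max_send_sge, and vice versa.
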
/kernel/linux/linux-6.6/drivers/infiniband/hw/erdma/
erdma_qp.c
288 u32 wqe_size, wqebb_cnt, hw_op, flags, sgl_offset; in erdma_push_one_sqe() local
341 wqe_size = sizeof(struct erdma_write_sqe); in erdma_push_one_sqe()
342 sgl_offset = wqe_size; in erdma_push_one_sqe()
370 wqe_size = sizeof(struct erdma_readreq_sqe) + in erdma_push_one_sqe()
389 wqe_size = sizeof(struct erdma_send_sqe); in erdma_push_one_sqe()
390 sgl_offset = wqe_size; in erdma_push_one_sqe()
414 wqe_size = sizeof(struct erdma_reg_mr_sqe) + in erdma_push_one_sqe()
418 wqe_size = sizeof(struct erdma_reg_mr_sqe); in erdma_push_one_sqe()
428 wqe_size = sizeof(struct erdma_reg_mr_sqe); in erdma_push_one_sqe()
457 wqe_size in erdma_push_one_sqe()
[all...]
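
erdma_push_one_sqe() chooses the base SQE size per opcode and places the scatter-gather list immediately after it (sgl_offset = wqe_size). A sketch of that dispatch; the opcode set and byte sizes are illustrative stand-ins for the erdma_*_sqe structs:

    /* Sketch: pick a base SQE size per opcode, then start the SGL
     * right after the opcode-specific header, as the erdma snippets
     * above do. Sizes are assumptions for the demo. */
    #include <stdio.h>

    enum op { OP_WRITE, OP_READ, OP_SEND, OP_REG_MR };

    static unsigned int base_sqe_size(enum op op)
    {
            switch (op) {
            case OP_WRITE:  return 32;  /* stand-in for erdma_write_sqe */
            case OP_READ:   return 32;  /* stand-in for erdma_readreq_sqe */
            case OP_SEND:   return 24;  /* stand-in for erdma_send_sqe */
            case OP_REG_MR: return 24;  /* stand-in for erdma_reg_mr_sqe */
            }
            return 0;
    }

    int main(void)
    {
            for (enum op op = OP_WRITE; op <= OP_REG_MR; op++) {
                    unsigned int wqe_size   = base_sqe_size(op);
                    unsigned int sgl_offset = wqe_size; /* SGL follows header */

                    printf("op %d: wqe_size %u, sgl at offset %u\n",
                           op, wqe_size, sgl_offset);
            }
            return 0;
    }
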
/kernel/linux/linux-6.6/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
109 static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe() argument
114 memset(wqe, 0, wqe_size); in mlx5e_fetch_wqe()
465 static inline u16 mlx5e_stop_room_for_wqe(struct mlx5_core_dev *mdev, u16 wqe_size) in mlx5e_stop_room_for_wqe() argument
477 WARN_ONCE(wqe_size > mlx5e_get_max_sq_wqebbs(mdev), in mlx5e_stop_room_for_wqe()
478 "wqe_size %u is greater than max SQ WQEBBs %u", in mlx5e_stop_room_for_wqe()
479 wqe_size, mlx5e_get_max_sq_wqebbs(mdev)); in mlx5e_stop_room_for_wqe()
481 return MLX5E_STOP_ROOM(wqe_size); in mlx5e_stop_room_for_wqe()
496 static inline bool mlx5e_icosq_can_post_wqe(struct mlx5e_icosq *sq, u16 wqe_size) in mlx5e_icosq_can_post_wqe() argument
498 u16 room = sq->reserved_room + MLX5E_STOP_ROOM(wqe_size); in mlx5e_icosq_can_post_wqe()
/kernel/linux/linux-5.10/drivers/net/ethernet/mellanox/mlx5/core/en/
txrx.h
73 static inline void *mlx5e_fetch_wqe(struct mlx5_wq_cyc *wq, u16 pi, size_t wqe_size) in mlx5e_fetch_wqe() argument
78 memset(wqe, 0, wqe_size); in mlx5e_fetch_wqe()
411 static inline u16 mlx5e_stop_room_for_wqe(u16 wqe_size) in mlx5e_stop_room_for_wqe() argument
424 if (__builtin_constant_p(wqe_size)) in mlx5e_stop_room_for_wqe()
425 BUILD_BUG_ON(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
427 WARN_ON_ONCE(wqe_size > MLX5_SEND_WQE_MAX_WQEBBS); in mlx5e_stop_room_for_wqe()
429 return wqe_size * 2 - 1; in mlx5e_stop_room_for_wqe()
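
Both mlx5 trees compute "stop room", the number of queue slots a producer must keep free before posting. The 5.10 formula wqe_size * 2 - 1 reflects that a multi-WQEBB WQE must be contiguous in the cyclic work queue: in the worst case up to wqe_size - 1 slots are burned (for example with a NOP) to avoid wrapping, and wqe_size more are needed for the WQE itself. A sketch of just that arithmetic:

    /* Sketch of the mlx5e stop-room rule above: reserve enough slots
     * that a contiguous WQE always fits, even when the producer must
     * pad to the end of the ring first. */
    #include <stdio.h>

    static unsigned int stop_room_for_wqe(unsigned int wqe_size_in_wqebbs)
    {
            /* worst-case padding (wqe_size - 1) + the WQE itself */
            return wqe_size_in_wqebbs * 2 - 1;
    }

    int main(void)
    {
            for (unsigned int sz = 1; sz <= 4; sz++)
                    printf("wqe of %u wqebb(s) -> reserve %u slots\n",
                           sz, stop_room_for_wqe(sz));
            return 0;
    }
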
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/
rxe_qp.c
207 int wqe_size; in rxe_qp_init_req() local
226 wqe_size = max_t(int, init->cap.max_send_sge * sizeof(struct ib_sge), in rxe_qp_init_req()
229 wqe_size / sizeof(struct ib_sge); in rxe_qp_init_req()
230 qp->sq.max_inline = init->cap.max_inline_data = wqe_size; in rxe_qp_init_req()
231 wqe_size += sizeof(struct rxe_send_wqe); in rxe_qp_init_req()
233 qp->sq.queue = rxe_queue_init(rxe, &qp->sq.max_wr, wqe_size); in rxe_qp_init_req()
270 int wqe_size; in rxe_qp_init_resp() local
276 wqe_size = rcv_wqe_size(qp->rq.max_sge); in rxe_qp_init_resp()
278 pr_debug("qp#%d max_wr = %d, max_sge = %d, wqe_size = %d\n", in rxe_qp_init_resp()
279 qp_num(qp), qp->rq.max_wr, qp->rq.max_sge, wqe_size); in rxe_qp_init_resp()
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/bnxt_re/
qplib_fp.h
93 u16 wqe_size; member
252 u16 wqe_size; member
567 return (que->wqe_size * que->max_wqe) / sizeof(struct sq_sge); in bnxt_qplib_get_depth()
582 static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot() argument
584 return (wqe_size / sizeof(struct sq_sge)); in bnxt_qplib_set_rq_max_slot()
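
The bnxt_re helpers above measure queues in fixed slots of sizeof(struct sq_sge): the depth is total WQE bytes divided by the slot size, and an RQ WQE occupies wqe_size / slot_size slots. A sketch assuming 16-byte slots (the real value comes from the driver's struct):

    /* Sketch of the bnxt_re slot arithmetic. SLOT_BYTES is an
     * assumed stand-in for sizeof(struct sq_sge). */
    #include <stdio.h>

    #define SLOT_BYTES 16  /* assumed sizeof(struct sq_sge) */

    static unsigned int queue_depth(unsigned int wqe_size, unsigned int max_wqe)
    {
            return (wqe_size * max_wqe) / SLOT_BYTES;  /* total slots */
    }

    static unsigned int rq_max_slot(unsigned int wqe_size)
    {
            return wqe_size / SLOT_BYTES;              /* slots per RQ WQE */
    }

    int main(void)
    {
            printf("depth: %u slots\n", queue_depth(128, 256));
            printf("rq wqe: %u slot(s)\n", rq_max_slot(128));
            return 0;
    }
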
/kernel/linux/linux-6.6/drivers/infiniband/hw/bnxt_re/
qplib_fp.h
95 u16 wqe_size; member
254 u16 wqe_size; member
575 return (que->wqe_size * que->max_wqe) / sizeof(struct sq_sge); in bnxt_qplib_get_depth()
590 static inline u32 bnxt_qplib_set_rq_max_slot(u32 wqe_size) in bnxt_qplib_set_rq_max_slot() argument
592 return (wqe_size / sizeof(struct sq_sge)); in bnxt_qplib_set_rq_max_slot()
/kernel/linux/linux-5.10/drivers/infiniband/hw/ocrdma/
ocrdma_verbs.c
366 dev->attr.wqe_size) : 0; in _ocrdma_alloc_pd()
498 resp.wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
500 resp.dpp_wqe_size = dev->attr.wqe_size; in ocrdma_alloc_ucontext()
1921 const struct ib_send_wr *wr, u32 wqe_size) in ocrdma_build_inline_sges()
1942 wqe_size += roundup(hdr->total_len, OCRDMA_WQE_ALIGN_BYTES); in ocrdma_build_inline_sges()
1944 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1949 wqe_size += (wr->num_sge * sizeof(struct ocrdma_sge)); in ocrdma_build_inline_sges()
1951 wqe_size += sizeof(struct ocrdma_sge); in ocrdma_build_inline_sges()
1954 hdr->cw |= ((wqe_size / OCRDMA_WQE_STRID in ocrdma_build_inline_sges()
1918 ocrdma_build_inline_sges(struct ocrdma_qp *qp, struct ocrdma_hdr_wqe *hdr, struct ocrdma_sge *sge, const struct ib_send_wr *wr, u32 wqe_size) ocrdma_build_inline_sges() argument
1963 u32 wqe_size = sizeof(*hdr); ocrdma_build_send() local
1983 u32 wqe_size = sizeof(*hdr) + sizeof(*ext_rw); ocrdma_build_write() local
2000 u32 wqe_size = ((wr->num_sge + 1) * sizeof(struct ocrdma_sge)) + ocrdma_build_read() local
2033 u32 wqe_size = sizeof(*fast_reg) + sizeof(*hdr); ocrdma_build_reg() local
2202 u32 wqe_size = 0; ocrdma_build_rqe() local
[all...]
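
The ocrdma hits round inline payloads up to the WQE alignment and then encode the total WQE size into the header cw word as a count of fixed strides. A sketch of that encoding; the alignment, stride, and field position are assumptions for the demo, not the hardware's values:

    /* Sketch: pad an inline payload to the WQE alignment, then encode
     * the size in strides into a header word, as the ocrdma snippets
     * above suggest. All three constants are assumed. */
    #include <stdio.h>

    #define WQE_ALIGN  16  /* assumed OCRDMA_WQE_ALIGN_BYTES */
    #define WQE_STRIDE 8   /* assumed OCRDMA_WQE_STRIDE */
    #define SZ_SHIFT   16  /* assumed position of the size field in cw */

    #define ROUNDUP(x, a) ((((x) + (a) - 1) / (a)) * (a))

    int main(void)
    {
            unsigned int hdr_size = 32, inline_len = 52;

            unsigned int wqe_size = hdr_size + ROUNDUP(inline_len, WQE_ALIGN);
            unsigned int cw = (wqe_size / WQE_STRIDE) << SZ_SHIFT;

            printf("wqe_size %u bytes -> %u strides, cw 0x%x\n",
                   wqe_size, wqe_size / WQE_STRIDE, cw);
            return 0;
    }
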
