Lines matching refs: wqe
42 static u32 restart_sge(struct rvt_sge_state *ss, struct rvt_swqe *wqe,
47 len = ((psn - wqe->psn) & QIB_PSN_MASK) * pmtu;
48 return rvt_restart_sge(ss, wqe, len);
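The two hits above are effectively the whole body of restart_sge(): the wrap-safe 24-bit PSN delta counts packets already sent, and each full packet carried pmtu bytes, so the SGE state is rewound to that byte offset. A stand-alone sketch of the arithmetic at line 47 (the function name is illustrative; the QIB_PSN_MASK value matches the driver's 24-bit PSNs):

#include <stdint.h>

#define QIB_PSN_MASK 0xFFFFFF           /* PSNs are 24-bit sequence numbers */

/* Byte offset at which to resume a partially sent WQE: packets already
 * covered (wrap-safe 24-bit delta) times bytes per packet. */
static uint32_t restart_offset(uint32_t psn, uint32_t wqe_psn, uint32_t pmtu)
{
        return ((psn - wqe_psn) & QIB_PSN_MASK) * pmtu;
}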
221 struct rvt_swqe *wqe;
251 wqe = rvt_get_swqe_ptr(qp, qp->s_last);
252 rvt_send_complete(qp, wqe, qp->s_last != qp->s_acked ?
275 wqe = rvt_get_swqe_ptr(qp, qp->s_cur);
296 if ((wqe->wr.send_flags & IB_SEND_FENCE) &&
302 qp->s_psn = wqe->psn;
309 len = wqe->length;
312 switch (wqe->wr.opcode) {
316 if (!rvt_rc_credit_avail(qp, wqe))
323 if (wqe->wr.opcode == IB_WR_SEND)
328 ohdr->u.imm_data = wqe->wr.ex.imm_data;
331 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
344 if (!rvt_rc_credit_avail(qp, wqe))
348 cpu_to_be64(wqe->rdma_wr.remote_addr);
350 cpu_to_be32(wqe->rdma_wr.rkey);
358 if (wqe->rdma_wr.wr.opcode == IB_WR_RDMA_WRITE)
364 wqe->rdma_wr.wr.ex.imm_data;
366 if (wqe->rdma_wr.wr.send_flags & IB_SEND_SOLICITED)
391 cpu_to_be64(wqe->rdma_wr.remote_addr);
393 cpu_to_be32(wqe->rdma_wr.rkey);
420 if (wqe->atomic_wr.wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP) {
422 put_ib_ateth_swap(wqe->atomic_wr.swap,
424 put_ib_ateth_compare(wqe->atomic_wr.compare_add,
428 put_ib_ateth_swap(wqe->atomic_wr.compare_add,
432 put_ib_ateth_vaddr(wqe->atomic_wr.remote_addr,
435 wqe->atomic_wr.rkey);
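Lines 420-435 fill the atomic extended transport header: for IB_WR_ATOMIC_CMP_AND_SWP the swap and compare operands go to their namesake fields, while for fetch-and-add the add operand travels in the swap slot and the compare slot is zeroed. A toy sketch of that mapping (struct and function are illustrative stand-ins, not driver API; byte-order conversion omitted):

#include <stdbool.h>
#include <stdint.h>

struct toy_atomic_eth {                 /* illustrative stand-in for the wire header */
        uint64_t vaddr;
        uint32_t rkey;
        uint64_t swap;
        uint64_t compare;
};

static void fill_ateth(struct toy_atomic_eth *eth, bool cmp_swap,
                       uint64_t swap, uint64_t compare_add,
                       uint64_t remote_addr, uint32_t rkey)
{
        eth->swap    = cmp_swap ? swap : compare_add;   /* FETCH_ADD: add rides in swap */
        eth->compare = cmp_swap ? compare_add : 0;      /* FETCH_ADD: compare unused */
        eth->vaddr   = remote_addr;
        eth->rkey    = rkey;
}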
447 qp->s_sge.sge = wqe->sg_list[0];
448 qp->s_sge.sg_list = wqe->sg_list + 1;
449 qp->s_sge.num_sge = wqe->wr.num_sge;
450 qp->s_sge.total_len = wqe->length;
451 qp->s_len = wqe->length;
457 if (wqe->wr.opcode == IB_WR_RDMA_READ)
458 qp->s_psn = wqe->lpsn + 1;
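Line 458 advances s_psn past the request's last PSN (lpsn). The listing does not show how lpsn is derived, but it has to be consistent with the restart arithmetic at line 47: one packet minimum, plus one per additional full pmtu of payload. A hedged reconstruction:

#include <stdint.h>

#define QIB_PSN_MASK 0xFFFFFF

/* Last PSN of a request of len bytes starting at first_psn (assumption:
 * a zero-length request still consumes one packet, hence one PSN). */
static uint32_t last_psn(uint32_t first_psn, uint32_t len, uint32_t pmtu)
{
        uint32_t extra = len ? (len - 1) / pmtu : 0;

        return (first_psn + extra) & QIB_PSN_MASK;
}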
473 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu);
486 if (wqe->wr.opcode == IB_WR_SEND)
491 ohdr->u.imm_data = wqe->wr.ex.imm_data;
494 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
512 qp->s_len = restart_sge(&qp->s_sge, wqe, qp->s_psn, pmtu);
525 if (wqe->wr.opcode == IB_WR_RDMA_WRITE)
530 ohdr->u.imm_data = wqe->wr.ex.imm_data;
532 if (wqe->wr.send_flags & IB_SEND_SOLICITED)
551 len = ((qp->s_psn - wqe->psn) & QIB_PSN_MASK) * pmtu;
553 cpu_to_be64(wqe->rdma_wr.remote_addr + len);
555 cpu_to_be32(wqe->rdma_wr.rkey);
556 ohdr->u.rc.reth.length = cpu_to_be32(wqe->length - len);
560 qp->s_psn = wqe->lpsn + 1;
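Lines 551-556 rebuild the RETH when a write retransmit resumes mid-request: the remote virtual address advances by the bytes already covered, and the advertised length shrinks by the same amount. Sketch (illustrative struct; host byte order where the driver uses cpu_to_be):

#include <stdint.h>

#define QIB_PSN_MASK 0xFFFFFF

struct toy_reth { uint64_t vaddr; uint32_t rkey; uint32_t length; };

static void rebuild_reth(struct toy_reth *reth, uint32_t s_psn,
                         uint32_t wqe_psn, uint32_t pmtu,
                         uint64_t remote_addr, uint32_t rkey, uint32_t len)
{
        uint32_t done = ((s_psn - wqe_psn) & QIB_PSN_MASK) * pmtu;

        reth->vaddr  = remote_addr + done;      /* resume past covered bytes */
        reth->rkey   = rkey;
        reth->length = len - done;
}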
569 delta = (((int) bth2 - (int) wqe->psn) << 8) >> 8;
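Line 569 is the classic 24-bit sign-extension trick: shift the PSN difference up 8 bits and arithmetically back down, so a PSN just behind wqe->psn compares negative even across the 2^24 wrap. The qib_cmp24() hits below rely on the same idea. Stand-alone sketch:

#include <stdint.h>

/* Signed distance from b to a modulo 2^24 (mirrors line 569). */
static int32_t psn_delta24(uint32_t a, uint32_t b)
{
        return (int32_t)((a - b) << 8) >> 8;
}

/* Three-way 24-bit comparison with the semantics qib_cmp24() provides:
 * < 0 if a precedes b, 0 if equal, > 0 if a follows b. */
static int cmp24(uint32_t a, uint32_t b)
{
        return (int)((a - b) << 8);     /* sign bit carries the ordering */
}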
738 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, n);
747 if (qib_cmp24(psn, wqe->psn) <= 0) {
753 opcode = wqe->wr.opcode;
761 wqe = rvt_get_swqe_ptr(qp, n);
762 diff = qib_cmp24(psn, wqe->psn);
774 opcode = wqe->wr.opcode;
822 struct rvt_swqe *wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
830 rvt_send_complete(qp, wqe, IB_WC_RETRY_EXC_ERR);
839 if (wqe->wr.opcode == IB_WR_RDMA_READ)
858 struct rvt_swqe *wqe;
863 wqe = rvt_get_swqe_ptr(qp, n);
864 if (qib_cmp24(psn, wqe->lpsn) <= 0) {
865 if (wqe->wr.opcode == IB_WR_RDMA_READ)
866 qp->s_sending_psn = wqe->lpsn + 1;
884 struct rvt_swqe *wqe;
918 wqe = rvt_get_swqe_ptr(qp, qp->s_last);
919 if (qib_cmp24(wqe->lpsn, qp->s_sending_psn) >= 0 &&
923 wqe,
924 ib_qib_wc_opcode[wqe->wr.opcode],
951 struct rvt_swqe *wqe,
959 if (qib_cmp24(wqe->lpsn, qp->s_sending_psn) < 0 ||
962 wqe,
963 ib_qib_wc_opcode[wqe->wr.opcode],
969 update_last_psn(qp, wqe->lpsn);
980 wqe = rvt_get_swqe_ptr(qp, qp->s_cur);
983 qp->s_psn = wqe->psn;
990 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
992 return wqe;
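do_rc_completion() (lines 951-992) retires the WQE at s_acked, steps the cursor around the circular send queue, and returns the new head entry. The wrap-around step itself, sketched as a hypothetical helper:

#include <stdint.h>

/* Advance a send-queue cursor (s_last, s_acked or s_cur) one slot,
 * wrapping at the ring size. */
static uint32_t advance_swqe_index(uint32_t idx, uint32_t s_size)
{
        return (idx + 1 == s_size) ? 0 : idx + 1;
}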
1011 struct rvt_swqe *wqe;
1025 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1032 while ((diff = qib_cmp24(ack_psn, wqe->lpsn)) >= 0) {
1039 if (wqe->wr.opcode == IB_WR_RDMA_READ &&
1054 if ((wqe->wr.opcode == IB_WR_RDMA_READ &&
1056 ((wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
1057 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) &&
1076 if (wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
1077 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD) {
1078 u64 *vaddr = wqe->sg_list[0].vaddr;
1082 (wqe->wr.opcode == IB_WR_RDMA_READ ||
1083 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
1084 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD)) {
1098 wqe = do_rc_completion(qp, wqe, ibp);
1195 rvt_send_complete(qp, wqe, status);
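The loop at line 1032 retires every WQE whose last PSN the ACK covers, bailing out early for RDMA READ and atomic requests that still owe response data (lines 1039-1057); a completed atomic's returned value is stored through the WQE's first SGE (line 1078). A toy model of the retire loop, with the loop bound simplified and cmp24() inlined:

#include <stdint.h>

struct toy_wqe { uint32_t lpsn; };

/* Retire ring entries whose last PSN is covered by ack_psn; returns the
 * new s_acked index.  Real driver bound and completion side effects are
 * richer; this shows only the PSN-coverage walk. */
static uint32_t ack_wqes(struct toy_wqe *sq, uint32_t s_size,
                         uint32_t s_acked, uint32_t s_cur,
                         uint32_t ack_psn)
{
        while (s_acked != s_cur &&
               (int)((ack_psn - sq[s_acked].lpsn) << 8) >= 0) {
                /* complete sq[s_acked] here */
                s_acked = (s_acked + 1 == s_size) ? 0 : s_acked + 1;
        }
        return s_acked;
}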
1226 struct rvt_swqe *wqe;
1231 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1233 while (qib_cmp24(psn, wqe->lpsn) > 0) {
1234 if (wqe->wr.opcode == IB_WR_RDMA_READ ||
1235 wqe->wr.opcode == IB_WR_ATOMIC_CMP_AND_SWP ||
1236 wqe->wr.opcode == IB_WR_ATOMIC_FETCH_AND_ADD)
1238 wqe = do_rc_completion(qp, wqe, ibp);
1275 struct rvt_swqe *wqe;
1339 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1355 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1356 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
1364 wqe, psn, pmtu);
1371 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
1421 wqe = rvt_get_swqe_ptr(qp, qp->s_acked);
1423 wqe, psn, pmtu);
1430 if (unlikely(wqe->wr.opcode != IB_WR_RDMA_READ))
1466 rvt_send_complete(qp, wqe, status);