Lines Matching defs:ibqp
330 if (qp->ibqp.qp_num == 0)
333 ctxt = hfi1_get_qp_map(dd, qp->ibqp.qp_num >> dd->qos_shift);
418 if (qp->ibqp.qp_type == IB_QPT_RC && HFI1_CAP_IS_KSET(TID_RDMA)) {
618 ibp = to_iport(qp->ibqp.device, qp->port_num);
620 dd = dd_from_ibdev(qp->ibqp.device);
1982 struct hfi1_ibport *ibp = to_iport(qp->ibqp.device, qp->port_num);
1984 struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
2225 struct hfi1_ibport *ibp = to_iport(qp->ibqp.device, qp->port_num);
2267 if (next > rvt_size_atomic(ib_to_rvt(qp->ibqp.device)))
2683 ibp = to_iport(qp->ibqp.device, qp->port_num);
3160 if (qp->ibqp.qp_type != IB_QPT_RC || !HFI1_CAP_IS_KSET(TID_RDMA))
3188 if (++i == rvt_max_atomic(ib_to_rvt(qp->ibqp.device)))
3569 rvt_size_atomic(ib_to_rvt(qp->ibqp.device)))
3598 if (qp->r_head_ack_queue > rvt_size_atomic(ib_to_rvt(qp->ibqp.device)))
3649 struct hfi1_ibport *ibp = to_iport(qp->ibqp.device, qp->port_num);
3695 rvt_size_atomic(ib_to_rvt(qp->ibqp.device));
3699 if (next > rvt_size_atomic(ib_to_rvt(qp->ibqp.device)))
3974 struct rvt_dev_info *rdi = ib_to_rvt(qp->ibqp.device);
3981 dd_dev_warn(dd_from_ibdev(qp->ibqp.device), "[QP%u] %s %d\n",
3982 qp->ibqp.qp_num, __func__, __LINE__);
3999 if (qp->ibqp.event_handler) {
4002 ev.device = qp->ibqp.device;
4003 ev.element.qp = &qp->ibqp;
4005 qp->ibqp.event_handler(&ev, qp->ibqp.qp_context);
4266 struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
4553 to_iport(qp->ibqp.device,
4741 struct ib_qp *ibqp = &qp->ibqp;
4742 struct rvt_dev_info *rdi = ib_to_rvt(ibqp->device);
4756 struct ib_qp *ibqp = &qp->ibqp;
4757 struct rvt_dev_info *rdi = ib_to_rvt(ibqp->device);
4863 struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
5180 struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
5342 ps.dev = to_idev(qp->ibqp.device);
5343 ps.ibp = to_iport(qp->ibqp.device, qp->port_num);
5408 to_iport(qp->ibqp.device, qp->port_num);
5457 struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
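
All of the matches above follow the same access pattern: the driver reaches port-, device-, and rdmavt-level state through the ib_device pointer embedded in the QP's ibqp member. The sketch below is not taken from the file itself; it is a minimal illustration of that pattern, assuming the usual hfi1/rdmavt helpers (to_iport, to_idev, dd_from_ibdev, ib_to_rvt, rvt_size_atomic) declared in the driver and rdma_vt headers, and a hypothetical function name chosen only for the example.

	/*
	 * Sketch only: shows how the call sites listed above typically
	 * derive their context structures from qp->ibqp. Header names
	 * follow the in-tree hfi1 convention.
	 */
	#include <rdma/rdma_vt.h>
	#include <rdma/rdmavt_qp.h>
	#include "hfi1.h"
	#include "verbs.h"

	static void example_qp_context(struct rvt_qp *qp)
	{
		/* Port-level state: ib_device plus the QP's port number. */
		struct hfi1_ibport *ibp = to_iport(qp->ibqp.device, qp->port_num);
		/* hfi1 verbs-device wrapper around the ib_device. */
		struct hfi1_ibdev *dev = to_idev(qp->ibqp.device);
		/* Chip-level device data, e.g. for dd_dev_warn(). */
		struct hfi1_devdata *dd = dd_from_ibdev(qp->ibqp.device);
		/* rdmavt device info, used for the atomic-queue bounds checks. */
		struct rvt_dev_info *rdi = ib_to_rvt(qp->ibqp.device);
		u32 max_ack_entries = rvt_size_atomic(rdi);

		(void)ibp;
		(void)dev;
		(void)dd;
		(void)max_ack_entries;
	}

The rvt_size_atomic()/rvt_max_atomic() comparisons seen at source lines 2267, 3188, 3569, 3598, and 3699 are bounds checks of the responder's ACK-queue indices against the size advertised by the rdmavt device info, which is why those sites also go through ib_to_rvt(qp->ibqp.device).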