Lines matching refs:lnk (net/smc/smc_wr.c)
523 void smc_wr_remember_qp_attr(struct smc_link *lnk)
525 struct ib_qp_attr *attr = &lnk->qp_attr;
530 ib_query_qp(lnk->roce_qp, attr,
550 lnk->wr_tx_cnt = min_t(size_t, SMC_WR_BUF_CNT,
551 lnk->qp_attr.cap.max_send_wr);
552 lnk->wr_rx_cnt = min_t(size_t, SMC_WR_BUF_CNT * 3,
553 lnk->qp_attr.cap.max_recv_wr);
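smc_wr_remember_qp_attr() caches the queried QP attributes and clamps the
link's send/receive work-request slot counts to what the QP can actually
hold. A minimal, self-contained sketch of that clamping; the QP cap values
below are hypothetical, and SMC_WR_BUF_CNT is 16 in smc_wr.h at the time of
writing:

	#include <stdio.h>
	#include <stddef.h>

	#define SMC_WR_BUF_CNT 16	/* ctrl buffers per link (smc_wr.h) */
	/* userspace stand-in for the kernel's min_t() */
	#define min_t(type, a, b) ((type)(a) < (type)(b) ? (type)(a) : (type)(b))

	int main(void)
	{
		size_t max_send_wr = 8;   /* hypothetical cap from ib_query_qp() */
		size_t max_recv_wr = 100; /* hypothetical cap */
		size_t wr_tx_cnt = min_t(size_t, SMC_WR_BUF_CNT, max_send_wr);
		size_t wr_rx_cnt = min_t(size_t, SMC_WR_BUF_CNT * 3, max_recv_wr);

		/* tx is capped at 8 by the QP, rx at 48 by SMC itself */
		printf("tx slots %zu, rx slots %zu\n", wr_tx_cnt, wr_rx_cnt);
		return 0;
	}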
556 static void smc_wr_init_sge(struct smc_link *lnk)
558 int sges_per_buf = (lnk->lgr->smc_version == SMC_V2) ? 2 : 1;
559 bool send_inline = (lnk->qp_attr.cap.max_inline_data > SMC_WR_TX_SIZE);
562 for (i = 0; i < lnk->wr_tx_cnt; i++) {
563 lnk->wr_tx_sges[i].addr = send_inline ? (uintptr_t)(&lnk->wr_tx_bufs[i]) :
564 lnk->wr_tx_dma_addr + i * SMC_WR_BUF_SIZE;
565 lnk->wr_tx_sges[i].length = SMC_WR_TX_SIZE;
566 lnk->wr_tx_sges[i].lkey = lnk->roce_pd->local_dma_lkey;
567 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[0].wr_tx_rdma_sge[0].lkey =
568 lnk->roce_pd->local_dma_lkey;
569 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[0].wr_tx_rdma_sge[1].lkey =
570 lnk->roce_pd->local_dma_lkey;
571 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[1].wr_tx_rdma_sge[0].lkey =
572 lnk->roce_pd->local_dma_lkey;
573 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[1].wr_tx_rdma_sge[1].lkey =
574 lnk->roce_pd->local_dma_lkey;
575 lnk->wr_tx_ibs[i].next = NULL;
576 lnk->wr_tx_ibs[i].sg_list = &lnk->wr_tx_sges[i];
577 lnk->wr_tx_ibs[i].num_sge = 1;
578 lnk->wr_tx_ibs[i].opcode = IB_WR_SEND;
579 lnk->wr_tx_ibs[i].send_flags =
580 IB_SEND_SIGNALED | IB_SEND_SOLICITED;
581 if (send_inline)
582 lnk->wr_tx_ibs[i].send_flags |= IB_SEND_INLINE;
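The send_inline test (line 559) is made once per link: if the device accepts
at least SMC_WR_TX_SIZE bytes of inline data, control sends carry
IB_SEND_INLINE and the SGE holds the buffer's kernel virtual address, since
the provider copies inline payload at post time and typically ignores the
lkey; otherwise the SGE must hold the DMA-mapped address. A condensed sketch
with a hypothetical helper (smc_wr_fill_tx_sge() does not exist upstream):

	/* Hypothetical helper condensing lines 563-566 and 579-582. */
	static void smc_wr_fill_tx_sge(struct smc_link *lnk, u32 i, bool send_inline)
	{
		struct ib_sge *sge = &lnk->wr_tx_sges[i];

		if (send_inline)	/* CPU copies payload at post time */
			sge->addr = (uintptr_t)&lnk->wr_tx_bufs[i];
		else			/* HCA DMAs payload from the buffer */
			sge->addr = lnk->wr_tx_dma_addr + i * SMC_WR_BUF_SIZE;
		sge->length = SMC_WR_TX_SIZE;
		sge->lkey = lnk->roce_pd->local_dma_lkey;
	}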
583 lnk->wr_tx_rdmas[i].wr_tx_rdma[0].wr.opcode = IB_WR_RDMA_WRITE;
584 lnk->wr_tx_rdmas[i].wr_tx_rdma[1].wr.opcode = IB_WR_RDMA_WRITE;
585 lnk->wr_tx_rdmas[i].wr_tx_rdma[0].wr.sg_list =
586 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[0].wr_tx_rdma_sge;
587 lnk->wr_tx_rdmas[i].wr_tx_rdma[1].wr.sg_list =
588 lnk->wr_tx_rdma_sges[i].tx_rdma_sge[1].wr_tx_rdma_sge;
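The long member chains above come from nested per-slot containers: each TX
slot owns up to two RDMA WRITE work requests, each with its own small SGE
array, so lkey and sg_list can be wired once here while only addr, length
and rkey vary per send. Approximately as declared in net/smc headers (array
bounds quoted from memory, so treat them as assumptions; SMC_MAX_RDMA_WRITES
and SMC_IB_MAX_SEND_SGE are both 2):

	struct smc_rdma_sge {			/* SGEs of one RDMA WRITE */
		struct ib_sge		wr_tx_rdma_sge[SMC_IB_MAX_SEND_SGE];
	};

	struct smc_rdma_sges {			/* SGEs per message send */
		struct smc_rdma_sge	tx_rdma_sge[SMC_MAX_RDMA_WRITES];
	};

	struct smc_rdma_wr {			/* WRs per message send */
		struct ib_rdma_wr	wr_tx_rdma[SMC_MAX_RDMA_WRITES];
	};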
591 if (lnk->lgr->smc_version == SMC_V2) {
592 lnk->wr_tx_v2_sge->addr = lnk->wr_tx_v2_dma_addr;
593 lnk->wr_tx_v2_sge->length = SMC_WR_BUF_V2_SIZE;
594 lnk->wr_tx_v2_sge->lkey = lnk->roce_pd->local_dma_lkey;
596 lnk->wr_tx_v2_ib->next = NULL;
597 lnk->wr_tx_v2_ib->sg_list = lnk->wr_tx_v2_sge;
598 lnk->wr_tx_v2_ib->num_sge = 1;
599 lnk->wr_tx_v2_ib->opcode = IB_WR_SEND;
600 lnk->wr_tx_v2_ib->send_flags =
601 IB_SEND_SIGNALED | IB_SEND_SOLICITED;
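Unlike the per-slot V1 state, the V2 path keeps a single large buffer and one
pre-built send WR per link, because SMC-R V2 LLC messages can exceed
SMC_WR_TX_SIZE. A hedged sketch of how such a WR would be posted; upstream's
smc_wr_tx_v2_send() does roughly this, but its exact body is quoted from
memory:

	/* Sketch: post the link's single V2 control WR (error paths elided). */
	static int smc_wr_post_v2_sketch(struct smc_link *lnk, u32 len)
	{
		const struct ib_send_wr *bad_wr;

		lnk->wr_tx_v2_sge->length = len;	/* actual message length */
		return ib_post_send(lnk->roce_qp, lnk->wr_tx_v2_ib, &bad_wr);
	}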
610 for (i = 0; i < lnk->wr_rx_cnt; i++) {
611 int x = i * sges_per_buf;
613 lnk->wr_rx_sges[x].addr =
614 lnk->wr_rx_dma_addr + i * SMC_WR_BUF_SIZE;
615 lnk->wr_rx_sges[x].length = SMC_WR_TX_SIZE;
616 lnk->wr_rx_sges[x].lkey = lnk->roce_pd->local_dma_lkey;
617 if (lnk->lgr->smc_version == SMC_V2) {
618 lnk->wr_rx_sges[x + 1].addr =
619 lnk->wr_rx_v2_dma_addr + SMC_WR_TX_SIZE;
620 lnk->wr_rx_sges[x + 1].length =
621 SMC_WR_BUF_V2_SIZE - SMC_WR_TX_SIZE;
622 lnk->wr_rx_sges[x + 1].lkey =
623 lnk->roce_pd->local_dma_lkey;
625 lnk->wr_rx_ibs[i].next = NULL;
626 lnk->wr_rx_ibs[i].sg_list = &lnk->wr_rx_sges[x];
627 lnk->wr_rx_ibs[i].num_sge = sges_per_buf;
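On a V2-capable link each receive WR thus carries two SGEs: the HCA fills
SGEs in order, so the first SMC_WR_TX_SIZE bytes of an inbound message land
in the slot's private buffer and any remainder spills into the link group's
shared V2 buffer. The spill SGE starts at offset SMC_WR_TX_SIZE inside that
shared buffer, leaving room before it for the head of the message. A
commented restatement of the layout as a hypothetical helper:

	/* Hypothetical helper: two-SGE receive layout for rx slot i (V2 link). */
	static void smc_wr_rx_sge_layout(struct smc_link *lnk, u32 i, int sges_per_buf)
	{
		int x = i * sges_per_buf;

		/* SGE 0: this slot's private ctrl buffer, head of the message */
		lnk->wr_rx_sges[x].addr = lnk->wr_rx_dma_addr + i * SMC_WR_BUF_SIZE;
		lnk->wr_rx_sges[x].length = SMC_WR_TX_SIZE;
		/* SGE 1: shared lgr-wide V2 buffer, tail of a long message */
		lnk->wr_rx_sges[x + 1].addr = lnk->wr_rx_v2_dma_addr + SMC_WR_TX_SIZE;
		lnk->wr_rx_sges[x + 1].length = SMC_WR_BUF_V2_SIZE - SMC_WR_TX_SIZE;
	}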
629 lnk->wr_reg.wr.next = NULL;
630 lnk->wr_reg.wr.num_sge = 0;
631 lnk->wr_reg.wr.send_flags = IB_SEND_SIGNALED;
632 lnk->wr_reg.wr.opcode = IB_WR_REG_MR;
633 lnk->wr_reg.access = IB_ACCESS_LOCAL_WRITE | IB_ACCESS_REMOTE_WRITE;
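lnk->wr_reg is a pre-initialized IB_WR_REG_MR work request: memory regions
for RMBs are registered through the QP's send queue, and only the MR pointer
and key change per registration. A sketch of completing and posting it;
upstream does this in smc_wr_reg_send(), whose details here are from memory:

	/* Sketch: register an MR via the send queue (fast-register WR). */
	static int smc_wr_post_reg_mr_sketch(struct smc_link *lnk, struct ib_mr *mr)
	{
		const struct ib_send_wr *bad_wr;

		lnk->wr_reg.wr.wr_id = (u64)(uintptr_t)mr;	/* completion cookie */
		lnk->wr_reg.mr = mr;
		lnk->wr_reg.key = mr->rkey;
		return ib_post_send(lnk->roce_qp, &lnk->wr_reg.wr, &bad_wr);
	}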
636 void smc_wr_free_link(struct smc_link *lnk)
640 if (!lnk->smcibdev)
641 return;
642 ibdev = lnk->smcibdev->ibdev;
644 smc_wr_drain_cq(lnk);
645 smc_wr_wakeup_reg_wait(lnk);
646 smc_wr_wakeup_tx_wait(lnk);
648 smc_wr_tx_wait_no_pending_sends(lnk);
649 percpu_ref_kill(&lnk->wr_reg_refs);
650 wait_for_completion(&lnk->reg_ref_comp);
651 percpu_ref_kill(&lnk->wr_tx_refs);
652 wait_for_completion(&lnk->tx_ref_comp);
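Teardown quiesces in-flight work with the percpu_ref + completion idiom:
percpu_ref_kill() drops the base reference, the release callback
(smcr_wr_reg_refs_free()/smcr_wr_tx_refs_free(), lines 853-862 below) fires
once the last in-flight get/put pair drains, and wait_for_completion()
blocks until then. A generic, self-contained sketch of the pattern with
stand-in names:

	#include <linux/kernel.h>
	#include <linux/percpu-refcount.h>
	#include <linux/completion.h>

	struct my_obj {			/* stand-in for struct smc_link */
		struct percpu_ref refs;		/* cf. wr_tx_refs/wr_reg_refs */
		struct completion ref_comp;	/* cf. tx_ref_comp/reg_ref_comp */
	};

	static void my_obj_refs_free(struct percpu_ref *ref)	/* release cb */
	{
		struct my_obj *obj = container_of(ref, struct my_obj, refs);

		complete(&obj->ref_comp);	/* last in-flight user is gone */
	}

	static int my_obj_setup(struct my_obj *obj)
	{
		init_completion(&obj->ref_comp);
		return percpu_ref_init(&obj->refs, my_obj_refs_free, 0, GFP_KERNEL);
	}

	static void my_obj_quiesce(struct my_obj *obj)
	{
		/* fast-path users bracket work with percpu_ref_get()/put() */
		percpu_ref_kill(&obj->refs);	/* tryget_live now fails */
		wait_for_completion(&obj->ref_comp);	/* users drain */
		percpu_ref_exit(&obj->refs);
	}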
654 if (lnk->wr_rx_dma_addr) {
655 ib_dma_unmap_single(ibdev, lnk->wr_rx_dma_addr,
656 SMC_WR_BUF_SIZE * lnk->wr_rx_cnt,
657 DMA_FROM_DEVICE);
658 lnk->wr_rx_dma_addr = 0;
660 if (lnk->wr_rx_v2_dma_addr) {
661 ib_dma_unmap_single(ibdev, lnk->wr_rx_v2_dma_addr,
662 SMC_WR_BUF_V2_SIZE,
663 DMA_FROM_DEVICE);
664 lnk->wr_rx_v2_dma_addr = 0;
666 if (lnk->wr_tx_dma_addr) {
667 ib_dma_unmap_single(ibdev, lnk->wr_tx_dma_addr,
668 SMC_WR_BUF_SIZE * lnk->wr_tx_cnt,
669 DMA_TO_DEVICE);
670 lnk->wr_tx_dma_addr = 0;
672 if (lnk->wr_tx_v2_dma_addr) {
673 ib_dma_unmap_single(ibdev, lnk->wr_tx_v2_dma_addr,
674 SMC_WR_BUF_V2_SIZE,
675 DMA_TO_DEVICE);
676 lnk->wr_tx_v2_dma_addr = 0;
691 void smc_wr_free_link_mem(struct smc_link *lnk)
693 kfree(lnk->wr_tx_v2_ib);
694 lnk->wr_tx_v2_ib = NULL;
695 kfree(lnk->wr_tx_v2_sge);
696 lnk->wr_tx_v2_sge = NULL;
697 kfree(lnk->wr_tx_v2_pend);
698 lnk->wr_tx_v2_pend = NULL;
699 kfree(lnk->wr_tx_compl);
700 lnk->wr_tx_compl = NULL;
701 kfree(lnk->wr_tx_pends);
702 lnk->wr_tx_pends = NULL;
703 bitmap_free(lnk->wr_tx_mask);
704 lnk->wr_tx_mask = NULL;
705 kfree(lnk->wr_tx_sges);
706 lnk->wr_tx_sges = NULL;
707 kfree(lnk->wr_tx_rdma_sges);
708 lnk->wr_tx_rdma_sges = NULL;
709 kfree(lnk->wr_rx_sges);
710 lnk->wr_rx_sges = NULL;
711 kfree(lnk->wr_tx_rdmas);
712 lnk->wr_tx_rdmas = NULL;
713 kfree(lnk->wr_rx_ibs);
714 lnk->wr_rx_ibs = NULL;
715 kfree(lnk->wr_tx_ibs);
716 lnk->wr_tx_ibs = NULL;
717 kfree(lnk->wr_tx_bufs);
718 lnk->wr_tx_bufs = NULL;
719 kfree(lnk->wr_rx_bufs);
720 lnk->wr_rx_bufs = NULL;
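Each kfree()/bitmap_free() above is paired with a NULL store, so
smc_wr_free_link_mem() is safe to call more than once (freeing NULL is a
no-op) and a stale user oopses on a NULL pointer instead of touching freed
memory. The same pattern, condensed with a hypothetical macro (upstream
writes the pairs out longhand):

	#define kfree_and_null(p)	do { kfree(p); (p) = NULL; } while (0)

	kfree_and_null(lnk->wr_tx_compl);	/* a second call frees nothing */
	kfree_and_null(lnk->wr_tx_pends);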
853 struct smc_link *lnk = container_of(ref, struct smc_link, wr_tx_refs);
855 complete(&lnk->tx_ref_comp);
860 struct smc_link *lnk = container_of(ref, struct smc_link, wr_reg_refs);
862 complete(&lnk->reg_ref_comp);
865 int smc_wr_create_link(struct smc_link *lnk)
867 struct ib_device *ibdev = lnk->smcibdev->ibdev;
870 smc_wr_tx_set_wr_id(&lnk->wr_tx_id, 0);
871 lnk->wr_rx_id = 0;
872 lnk->wr_rx_dma_addr = ib_dma_map_single(
873 ibdev, lnk->wr_rx_bufs, SMC_WR_BUF_SIZE * lnk->wr_rx_cnt,
874 DMA_FROM_DEVICE);
875 if (ib_dma_mapping_error(ibdev, lnk->wr_rx_dma_addr)) {
876 lnk->wr_rx_dma_addr = 0;
880 if (lnk->lgr->smc_version == SMC_V2) {
881 lnk->wr_rx_v2_dma_addr = ib_dma_map_single(ibdev,
882 lnk->lgr->wr_rx_buf_v2, SMC_WR_BUF_V2_SIZE,
883 DMA_FROM_DEVICE);
884 if (ib_dma_mapping_error(ibdev, lnk->wr_rx_v2_dma_addr)) {
885 lnk->wr_rx_v2_dma_addr = 0;
889 lnk->wr_tx_v2_dma_addr = ib_dma_map_single(ibdev,
890 lnk->lgr->wr_tx_buf_v2, SMC_WR_BUF_V2_SIZE,
891 DMA_TO_DEVICE);
892 if (ib_dma_mapping_error(ibdev, lnk->wr_tx_v2_dma_addr)) {
893 lnk->wr_tx_v2_dma_addr = 0;
898 lnk->wr_tx_dma_addr = ib_dma_map_single(
899 ibdev, lnk->wr_tx_bufs, SMC_WR_BUF_SIZE * lnk->wr_tx_cnt,
900 DMA_TO_DEVICE);
901 if (ib_dma_mapping_error(ibdev, lnk->wr_tx_dma_addr)) {
905 smc_wr_init_sge(lnk);
906 bitmap_zero(lnk->wr_tx_mask, SMC_WR_BUF_CNT);
907 init_waitqueue_head(&lnk->wr_tx_wait);
908 rc = percpu_ref_init(&lnk->wr_tx_refs, smcr_wr_tx_refs_free, 0, GFP_KERNEL);
911 init_completion(&lnk->tx_ref_comp);
912 init_waitqueue_head(&lnk->wr_reg_wait);
913 rc = percpu_ref_init(&lnk->wr_reg_refs, smcr_wr_reg_refs_free, 0, GFP_KERNEL);
916 init_completion(&lnk->reg_ref_comp);
917 init_waitqueue_head(&lnk->wr_rx_empty_wait);
921 if (lnk->wr_rx_v2_dma_addr) {
922 ib_dma_unmap_single(ibdev, lnk->wr_rx_v2_dma_addr,
923 SMC_WR_BUF_V2_SIZE,
924 DMA_FROM_DEVICE);
925 lnk->wr_rx_v2_dma_addr = 0;
927 if (lnk->wr_tx_v2_dma_addr) {
928 ib_dma_unmap_single(ibdev, lnk->wr_tx_v2_dma_addr,
929 SMC_WR_BUF_V2_SIZE,
930 DMA_TO_DEVICE);
931 lnk->wr_tx_v2_dma_addr = 0;
933 ib_dma_unmap_single(ibdev, lnk->wr_rx_dma_addr,
934 SMC_WR_BUF_SIZE * lnk->wr_rx_cnt,
935 DMA_FROM_DEVICE);
936 lnk->wr_rx_dma_addr = 0;
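Lines 921-936 are the error-unwind label of smc_wr_create_link(): every
ib_dma_map_single() is validated with ib_dma_mapping_error(), failures jump
to the label, and each address is zeroed after unmapping so a later
smc_wr_free_link() will not unmap it again. A minimal sketch of that
map-and-unwind shape with shortened, hypothetical names:

	/* Sketch: map-and-unwind shape of smc_wr_create_link(). */
	static int map_link_bufs_sketch(struct ib_device *ibdev, struct smc_link *lnk)
	{
		int rc;

		lnk->wr_rx_dma_addr = ib_dma_map_single(ibdev, lnk->wr_rx_bufs,
							SMC_WR_BUF_SIZE * lnk->wr_rx_cnt,
							DMA_FROM_DEVICE);
		if (ib_dma_mapping_error(ibdev, lnk->wr_rx_dma_addr)) {
			lnk->wr_rx_dma_addr = 0;
			return -EIO;	/* nothing mapped yet, nothing to undo */
		}

		lnk->wr_tx_dma_addr = ib_dma_map_single(ibdev, lnk->wr_tx_bufs,
							SMC_WR_BUF_SIZE * lnk->wr_tx_cnt,
							DMA_TO_DEVICE);
		if (ib_dma_mapping_error(ibdev, lnk->wr_tx_dma_addr)) {
			rc = -EIO;
			goto unmap_rx;	/* undo earlier mappings */
		}
		return 0;

	unmap_rx:
		ib_dma_unmap_single(ibdev, lnk->wr_rx_dma_addr,
				    SMC_WR_BUF_SIZE * lnk->wr_rx_cnt,
				    DMA_FROM_DEVICE);
		lnk->wr_rx_dma_addr = 0;	/* so free_link won't unmap twice */
		return rc;
	}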