/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/

mr.c
    67    err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0,  in mlx4_ib_get_dma_mr()
    428   err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, virt_addr, length,  in mlx4_ib_reg_user_mr()
    480   to_mpd(pd)->pdn);  in mlx4_ib_rereg_user_mr()
    619   err = mlx4_mw_alloc(dev->dev, to_mpd(ibmw->pd)->pdn,  in mlx4_ib_alloc_mw()
    659   err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, 0, 0, 0,  in mlx4_ib_alloc_mr()

ah.c
    48    ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |  in create_ib_ah()
    118   ah->av.eth.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |  in create_iboe_ah()

srq.c
    181   err = mlx4_srq_alloc(dev->dev, to_mpd(ib_srq->pd)->pdn, cqn, xrcdn,  in mlx4_ib_create_srq()

mlx4_ib.h
    678   static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)  (definition of to_mpd())
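The mlx4_ib.h hit at line 678 is the definition site of to_mpd(). For context, a simplified sketch of the wrapper and the downcast it performs is shown below; only the fields the matches above dereference are included, so treat the struct layout as an assumption about the real struct mlx4_ib_pd rather than a verbatim copy.

/* Simplified sketch, not verbatim kernel code: the driver PD embeds the
 * core verbs PD, and to_mpd() recovers the wrapper via container_of().
 * 'pdn' is the hardware protection-domain number that the mr.c and srq.c
 * matches above pass to mlx4_mr_alloc()/mlx4_mw_alloc()/mlx4_srq_alloc(). */
struct mlx4_ib_pd {
	struct ib_pd	ibpd;	/* core verbs object, embedded */
	u32		pdn;	/* hardware PD number */
};

static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)
{
	return container_of(ibpd, struct mlx4_ib_pd, ibpd);
}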
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/

mr.c
    67    err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0,  in mlx4_ib_get_dma_mr()
    428   err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, virt_addr, length,  in mlx4_ib_reg_user_mr()
    479   to_mpd(pd)->pdn);  in mlx4_ib_rereg_user_mr()
    619   err = mlx4_mw_alloc(dev->dev, to_mpd(ibmw->pd)->pdn,  in mlx4_ib_alloc_mw()
    659   err = mlx4_mr_alloc(dev->dev, to_mpd(pd)->pdn, 0, 0, 0,  in mlx4_ib_alloc_mr()

ah.c
    48    ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |  in create_ib_ah()
    118   ah->av.eth.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |  in create_iboe_ah()

srq.c
    185   err = mlx4_srq_alloc(dev->dev, to_mpd(ib_srq->pd)->pdn, cqn, xrcdn,  in mlx4_ib_create_srq()

mlx4_ib.h
    680   static inline struct mlx4_ib_pd *to_mpd(struct ib_pd *ibpd)  (definition of to_mpd())
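The ah.c hits above (lines 48 and 118) are cut off right after the OR. The usual mlx4 address-handle pattern packs the 24-bit PD number into the low bits of the big-endian port_pd word and the port number into the top byte; the continuation below is a hedged reconstruction for illustration, not a verbatim copy of the truncated source line.

/* Illustration of the port_pd packing in create_ib_ah()/create_iboe_ah():
 * low 24 bits = PD number, top 8 bits = physical port, converted to the
 * big-endian layout the hardware expects. Reconstructed, not copied. */
ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn |
				(rdma_ah_get_port_num(ah_attr) << 24));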
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/

std_types.c
    20    struct mlx5_ib_pd *mpd = to_mpd(pd);  in MLX5_IB_METHOD_PD_QUERY()

qp.c
    970   uid = (attr->qp_type != IB_QPT_XRC_INI) ? to_mpd(pd)->uid : 0;  in _create_user_qp()
    1178  MLX5_SET(create_tis_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_tis()
    1189  mlx5_cmd_destroy_tis(dev->mdev, sq->tisn, to_mpd(pd)->uid);  in destroy_raw_packet_qp_tis()
    1230  MLX5_SET(create_sq_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_sq()
    1319  MLX5_SET(create_rq_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_rq()
    1368  mlx5_cmd_destroy_tir(dev->mdev, rq->tirn, to_mpd(pd)->uid);  in destroy_raw_packet_qp_tir()
    1387  MLX5_SET(create_tir_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_tir()
    1434  u16 uid = to_mpd(pd)->uid;  in create_raw_packet_qp()
    1550  to_mpd(qp->ibqp.pd)->uid);  in destroy_rss_raw_qp_tir()
    1613  MLX5_SET(create_tir_in, in, uid, to_mpd(p  in create_rss_raw_qp_tir()
    [all...]

srq.c
    115   in->uid = (in->type != IB_SRQT_XRC) ? to_mpd(pd)->uid : 0;  in create_srq_user()
    299   in.pd = to_mpd(ib_srq->pd)->pdn;  in mlx5_ib_create_srq()

mr.c
    71    MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in set_mkc_access_pd_addr_fields()
    1011  mr->mmkey.pd = to_mpd(pd)->pdn;  in alloc_mr_from_cache()
    1390  mr = mlx5_ib_alloc_implicit_mr(to_mpd(pd), udata, access_flags);  in mlx5_ib_reg_user_mr()
    1601  mr->mmkey.pd = to_mpd(pd)->pdn;  in mlx5_ib_rereg_user_mr()
    1861  err = mlx5_core_create_psv(dev->mdev, to_mpd(pd)->pdn, 2, psv_index);  in mlx5_alloc_integrity_descs()
    2031  MLX5_SET(mkc, mkc, pd, to_mpd(ibmw->pd)->pdn);  in mlx5_ib_alloc_mw()

wr.c
    426   MLX5_SET(mkc, seg, pd, to_mpd(umrwr->pd)->pdn);  in set_reg_mkey_segment()
    786   u32 pdn = to_mpd(qp->ibqp.pd)->pdn;  in set_pi_umr_wr()
    862   struct mlx5_ib_pd *pd = to_mpd(qp->ibqp.pd);  in set_reg_wr()
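Across the mlx5 hits, two members are pulled out of the PD wrapper: pdn, which is written into memory-key and SRQ contexts (mr.c, wr.c, srq.c), and uid, which is stamped into the create/destroy commands for TIS/SQ/RQ/TIR objects (qp.c), with XRC objects falling back to uid 0 as the _create_user_qp() and create_srq_user() matches show. A hedged sketch of the wrapper these accesses assume follows; the layout is simplified and the real struct mlx5_ib_pd may carry more state.

/* Simplified sketch of the mlx5 PD wrapper; kept to the two members
 * referenced by the matches above, so treat the layout as an assumption. */
struct mlx5_ib_pd {
	struct ib_pd	ibpd;
	u32		pdn;	/* PD number: mkey contexts, SRQ in.pd, PSV allocation */
	u16		uid;	/* user index: uid field of create_tis/sq/rq/tir commands */
};

static inline struct mlx5_ib_pd *to_mpd(struct ib_pd *ibpd)
{
	return container_of(ibpd, struct mlx5_ib_pd, ibpd);
}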
/kernel/linux/linux-5.10/drivers/infiniband/hw/mthca/

mthca_provider.c
    359   struct mthca_pd *pd = to_mpd(ibpd);  in mthca_alloc_pd()
    378   mthca_pd_free(to_mdev(pd->device), to_mpd(pd));  in mthca_dealloc_pd()
    389   return mthca_create_ah(to_mdev(ibah->device), to_mpd(ibah->pd),  in mthca_ah_create()
    427   err = mthca_alloc_srq(to_mdev(ibsrq->device), to_mpd(ibsrq->pd),  in mthca_create_srq()
    515   err = mthca_alloc_qp(to_mdev(pd->device), to_mpd(pd),  in mthca_create_qp()
    549   err = mthca_alloc_sqp(to_mdev(pd->device), to_mpd(pd),  in mthca_create_qp()
    839   to_mpd(pd)->pd_num,  in mthca_get_dma_mr()
    928   err = mthca_mr_alloc(dev, to_mpd(pd)->pd_num, PAGE_SHIFT, virt, length,  in mthca_reg_user_mr()

mthca_provider.h
    291   static inline struct mthca_pd *to_mpd(struct ib_pd *ibpd)  (definition of to_mpd())

mthca_qp.c
    709   qp_context->pd = cpu_to_be32(to_mpd(ibqp->pd)->pd_num);  in __mthca_modify_qp()
    1489  atomic_dec(&(to_mpd(qp->ibqp.pd)->sqp_count));  in mthca_free_qp()
    1558  data->lkey = cpu_to_be32(to_mpd(qp->ibqp.pd)->ntmr.ibmr.lkey);  in build_mlx_header()
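mthca follows the same downcast idiom, but its matches reach three different members of the wrapper: pd_num (programmed into MR allocations and the QP context), sqp_count (decremented when a special QP is freed), and ntmr (whose lkey is used when building MLX headers). The sketch below is hedged and trimmed to those members; the real struct mthca_pd may hold additional fields.

/* Simplified sketch of the mthca PD wrapper, limited to the members the
 * matches above dereference; layout is an assumption for illustration. */
struct mthca_pd {
	struct ib_pd	ibpd;
	u32		pd_num;		/* hardware PD number (QP context, MR allocation) */
	atomic_t	sqp_count;	/* special-QP refcount, see mthca_free_qp() */
	struct mthca_mr	ntmr;		/* internal MR whose lkey build_mlx_header() uses */
};

static inline struct mthca_pd *to_mpd(struct ib_pd *ibpd)
{
	return container_of(ibpd, struct mthca_pd, ibpd);
}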
/kernel/linux/linux-6.6/drivers/infiniband/hw/mthca/

mthca_provider.c
    351   struct mthca_pd *pd = to_mpd(ibpd);  in mthca_alloc_pd()
    370   mthca_pd_free(to_mdev(pd->device), to_mpd(pd));  in mthca_dealloc_pd()
    381   return mthca_create_ah(to_mdev(ibah->device), to_mpd(ibah->pd),  in mthca_ah_create()
    419   err = mthca_alloc_srq(to_mdev(ibsrq->device), to_mpd(ibsrq->pd),  in mthca_create_srq()
    500   err = mthca_alloc_qp(dev, to_mpd(ibqp->pd),  in mthca_create_qp()
    525   err = mthca_alloc_sqp(dev, to_mpd(ibqp->pd),  in mthca_create_qp()
    813   to_mpd(pd)->pd_num,  in mthca_get_dma_mr()
    902   err = mthca_mr_alloc(dev, to_mpd(pd)->pd_num, PAGE_SHIFT, virt, length,  in mthca_reg_user_mr()

mthca_provider.h
    291   static inline struct mthca_pd *to_mpd(struct ib_pd *ibpd)  (definition of to_mpd())

mthca_qp.c
    709   qp_context->pd = cpu_to_be32(to_mpd(ibqp->pd)->pd_num);  in __mthca_modify_qp()
    1492  atomic_dec(&(to_mpd(qp->ibqp.pd)->sqp_count));  in mthca_free_qp()
    1561  data->lkey = cpu_to_be32(to_mpd(qp->ibqp.pd)->ntmr.ibmr.lkey);  in build_mlx_header()
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/

qp.c
    1030  uid = (attr->qp_type != IB_QPT_XRC_INI) ? to_mpd(pd)->uid : 0;  in _create_user_qp()
    1238  MLX5_SET(create_tis_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_tis()
    1252  mlx5_cmd_destroy_tis(dev->mdev, sq->tisn, to_mpd(pd)->uid);  in destroy_raw_packet_qp_tis()
    1379  MLX5_SET(create_sq_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_sq()
    1466  MLX5_SET(create_rq_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_rq()
    1516  mlx5_cmd_destroy_tir(dev->mdev, rq->tirn, to_mpd(pd)->uid);  in destroy_raw_packet_qp_tir()
    1535  MLX5_SET(create_tir_in, in, uid, to_mpd(pd)->uid);  in create_raw_packet_qp_tir()
    1582  u16 uid = to_mpd(pd)->uid;  in create_raw_packet_qp()
    1700  to_mpd(qp->ibqp.pd)->uid);  in destroy_rss_raw_qp_tir()
    1763  MLX5_SET(create_tir_in, in, uid, to_mpd(p  in create_rss_raw_qp_tir()
    [all...]

srq.c
    92    in->uid = (in->type != IB_SRQT_XRC) ? to_mpd(pd)->uid : 0;  in create_srq_user()
    278   in.pd = to_mpd(ib_srq->pd)->pdn;  in mlx5_ib_create_srq()

umr.c
    371   MLX5_SET(mkc, &wqe.mkey_seg, pd, to_mpd(dev->umrc.pd)->pdn);  in mlx5r_umr_revoke_mr()
    410   MLX5_SET(mkc, &wqe.mkey_seg, pd, to_mpd(pd)->pdn);  in mlx5r_umr_rereg_pd_access()
    546   MLX5_SET(mkc, mkey_seg, pd, to_mpd(mr->ibmr.pd)->pdn);  in mlx5r_umr_set_update_xlt_mkey_seg()

std_types.c
    22    struct mlx5_ib_pd *mpd = to_mpd(pd);  in MLX5_IB_METHOD_PD_QUERY()

mr.c
    81    MLX5_SET(mkc, mkc, pd, to_mpd(pd)->pdn);  in set_mkc_access_pd_addr_fields()
    1452  mr = mlx5_ib_alloc_implicit_mr(to_mpd(pd), access_flags);  in create_user_odp_mr()
    2059  err = mlx5_core_create_psv(dev->mdev, to_mpd(pd)->pdn, 2, psv_index);  in mlx5_alloc_integrity_descs()
    2227  MLX5_SET(mkc, mkc, pd, to_mpd(ibmw->pd)->pdn);  in mlx5_ib_alloc_mw()

wr.c
    565   u32 pdn = to_mpd(qp->ibqp.pd)->pdn;  in set_pi_umr_wr()
    641   struct mlx5_ib_pd *pd = to_mpd(qp->ibqp.pd);  in set_reg_wr()
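The std_types.c hit is the direct-verbs PD query method: it downcasts the PD bound to the method and reports its hardware PD number back to userspace. A hedged sketch of such a handler follows; the MLX5_IB_ATTR_QUERY_PD_* attribute names are assumptions following the mlx5 uverbs naming convention, not copied from the match.

/* Sketch of a MLX5_IB_METHOD_PD_QUERY handler: fetch the PD object, downcast
 * it with to_mpd(), and copy its pdn into the response attribute. Attribute
 * names are assumed for illustration. */
static int UVERBS_HANDLER(MLX5_IB_METHOD_PD_QUERY)(
	struct uverbs_attr_bundle *attrs)
{
	struct ib_pd *pd =
		uverbs_attr_get_obj(attrs, MLX5_IB_ATTR_QUERY_PD_HANDLE);
	struct mlx5_ib_pd *mpd = to_mpd(pd);

	return uverbs_copy_to(attrs, MLX5_IB_ATTR_QUERY_PD_RESP_PDN,
			      &mpd->pdn, sizeof(mpd->pdn));
}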