
Searched refs:mr (Results 1 - 25 of 581) sorted by relevance


/kernel/linux/linux-5.10/drivers/infiniband/sw/rdmavt/
mr.c
53 #include "mr.h"
117 static void rvt_deinit_mregion(struct rvt_mregion *mr) in rvt_deinit_mregion() argument
119 int i = mr->mapsz; in rvt_deinit_mregion()
121 mr->mapsz = 0; in rvt_deinit_mregion()
123 kfree(mr->map[--i]); in rvt_deinit_mregion()
124 percpu_ref_exit(&mr->refcount); in rvt_deinit_mregion()
129 struct rvt_mregion *mr = container_of(ref, struct rvt_mregion, in __rvt_mregion_complete() local
132 complete(&mr->comp); in __rvt_mregion_complete()
135 static int rvt_init_mregion(struct rvt_mregion *mr, struct ib_pd *pd, in rvt_init_mregion() argument
141 mr in rvt_init_mregion()
177 rvt_alloc_lkey(struct rvt_mregion *mr, int dma_region) rvt_alloc_lkey() argument
247 rvt_free_lkey(struct rvt_mregion *mr) rvt_free_lkey() argument
281 struct rvt_mr *mr; __rvt_alloc_mr() local
314 __rvt_free_mr(struct rvt_mr *mr) __rvt_free_mr() argument
330 struct rvt_mr *mr; rvt_get_dma_mr() local
381 struct rvt_mr *mr; rvt_reg_user_mr() local
450 struct rvt_mregion *mr = (struct rvt_mregion *)v; rvt_dereg_clean_qp_cb() local
465 rvt_dereg_clean_qps(struct rvt_mregion *mr) rvt_dereg_clean_qps() argument
483 rvt_check_refs(struct rvt_mregion *mr, const char *t) rvt_check_refs() argument
512 rvt_mr_has_lkey(struct rvt_mregion *mr, u32 lkey) rvt_mr_has_lkey() argument
552 struct rvt_mr *mr = to_imr(ibmr); rvt_dereg_mr() local
579 struct rvt_mr *mr; rvt_alloc_mr() local
600 struct rvt_mr *mr = to_imr(ibmr); rvt_set_page() local
632 struct rvt_mr *mr = to_imr(ibmr); rvt_map_mr_sg() local
658 struct rvt_mr *mr = to_imr(ibmr); rvt_fast_reg_mr() local
692 struct rvt_mregion *mr; rvt_invalidate_rkey() local
761 struct rvt_mregion *mr; rvt_lkey_ok() local
872 struct rvt_mregion *mr; rvt_rkey_ok() local
[all...]
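
The fragments above outline rvt_deinit_mregion(): walk the map table backwards, free each segment, then tear down the per-CPU refcount. A reconstruction pieced from the matched lines; the loop construct itself is not shown in the hits and is inferred:

static void rvt_deinit_mregion(struct rvt_mregion *mr)
{
	int i = mr->mapsz;

	mr->mapsz = 0;
	while (i)			/* loop condition inferred */
		kfree(mr->map[--i]);	/* free map segments in reverse */
	percpu_ref_exit(&mr->refcount);	/* drop the percpu refcount */
}
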
trace_mr.h
57 #include "mr.h"
63 TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
64 TP_ARGS(mr, m, n, v, len),
66 RDI_DEV_ENTRY(ib_to_rvt(mr->pd->device))
79 RDI_DEV_ASSIGN(ib_to_rvt(mr->pd->device));
82 __entry->iova = mr->iova;
83 __entry->user_base = mr->user_base;
84 __entry->lkey = mr->lkey;
88 __entry->length = mr->length;
89 __entry->offset = mr
[all...]
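
The trace_mr.h hits in both kernel versions outline a tracepoint class that snapshots an MR's identifying fields. A skeleton consistent with those fragments; the class name and print format are illustrative and the field list is trimmed (RDI_DEV_ENTRY/RDI_DEV_ASSIGN are rdmavt's device-name capture helpers):

DECLARE_EVENT_CLASS(rvt_mr_template,
	TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
	TP_ARGS(mr, m, n, v, len),
	TP_STRUCT__entry(
		RDI_DEV_ENTRY(ib_to_rvt(mr->pd->device))
		__field(u64, iova)
		__field(u64, user_base)
		__field(u32, lkey)
		__field(size_t, length)
	),
	TP_fast_assign(
		RDI_DEV_ASSIGN(ib_to_rvt(mr->pd->device));
		__entry->iova = mr->iova;
		__entry->user_base = mr->user_base;
		__entry->lkey = mr->lkey;
		__entry->length = mr->length;
	),
	/* format string is illustrative, not taken from the hits */
	TP_printk("[%s] iova %llx user_base %llx lkey %x len %zu",
		  __get_str(dev), __entry->iova, __entry->user_base,
		  __entry->lkey, __entry->length)
);
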
/kernel/linux/linux-6.6/drivers/infiniband/sw/rdmavt/
mr.c
11 #include "mr.h"
75 static void rvt_deinit_mregion(struct rvt_mregion *mr) in rvt_deinit_mregion() argument
77 int i = mr->mapsz; in rvt_deinit_mregion()
79 mr->mapsz = 0; in rvt_deinit_mregion()
81 kfree(mr->map[--i]); in rvt_deinit_mregion()
82 percpu_ref_exit(&mr->refcount); in rvt_deinit_mregion()
87 struct rvt_mregion *mr = container_of(ref, struct rvt_mregion, in __rvt_mregion_complete() local
90 complete(&mr->comp); in __rvt_mregion_complete()
93 static int rvt_init_mregion(struct rvt_mregion *mr, struct ib_pd *pd, in rvt_init_mregion() argument
99 mr in rvt_init_mregion()
135 rvt_alloc_lkey(struct rvt_mregion *mr, int dma_region) rvt_alloc_lkey() argument
205 rvt_free_lkey(struct rvt_mregion *mr) rvt_free_lkey() argument
239 struct rvt_mr *mr; __rvt_alloc_mr() local
272 __rvt_free_mr(struct rvt_mr *mr) __rvt_free_mr() argument
288 struct rvt_mr *mr; rvt_get_dma_mr() local
340 struct rvt_mr *mr; rvt_reg_user_mr() local
409 struct rvt_mregion *mr = (struct rvt_mregion *)v; rvt_dereg_clean_qp_cb() local
424 rvt_dereg_clean_qps(struct rvt_mregion *mr) rvt_dereg_clean_qps() argument
442 rvt_check_refs(struct rvt_mregion *mr, const char *t) rvt_check_refs() argument
471 rvt_mr_has_lkey(struct rvt_mregion *mr, u32 lkey) rvt_mr_has_lkey() argument
511 struct rvt_mr *mr = to_imr(ibmr); rvt_dereg_mr() local
538 struct rvt_mr *mr; rvt_alloc_mr() local
559 struct rvt_mr *mr = to_imr(ibmr); rvt_set_page() local
591 struct rvt_mr *mr = to_imr(ibmr); rvt_map_mr_sg() local
617 struct rvt_mr *mr = to_imr(ibmr); rvt_fast_reg_mr() local
651 struct rvt_mregion *mr; rvt_invalidate_rkey() local
720 struct rvt_mregion *mr; rvt_lkey_ok() local
831 struct rvt_mregion *mr; rvt_rkey_ok() local
[all...]
trace_mr.h
15 #include "mr.h"
21 TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
22 TP_ARGS(mr, m, n, v, len),
24 RDI_DEV_ENTRY(ib_to_rvt(mr->pd->device))
37 RDI_DEV_ASSIGN(ib_to_rvt(mr->pd->device));
40 __entry->iova = mr->iova;
41 __entry->user_base = mr->user_base;
42 __entry->lkey = mr->lkey;
46 __entry->length = mr->length;
47 __entry->offset = mr
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/sw/rxe/
rxe_mr.c
27 int mr_check_range(struct rxe_mr *mr, u64 iova, size_t length) in mr_check_range() argument
29 switch (mr->ibmr.type) { in mr_check_range()
35 if (iova < mr->ibmr.iova || in mr_check_range()
36 iova + length > mr->ibmr.iova + mr->ibmr.length) { in mr_check_range()
37 rxe_dbg_mr(mr, "iova/length out of range"); in mr_check_range()
43 rxe_dbg_mr(mr, "mr type not supported\n"); in mr_check_range()
48 static void rxe_mr_init(int access, struct rxe_mr *mr) in rxe_mr_init() argument
50 u32 key = mr in rxe_mr_init()
67 rxe_mr_init_dma(int access, struct rxe_mr *mr) rxe_mr_init_dma() argument
75 rxe_mr_iova_to_index(struct rxe_mr *mr, u64 iova) rxe_mr_iova_to_index() argument
80 rxe_mr_iova_to_page_offset(struct rxe_mr *mr, u64 iova) rxe_mr_iova_to_page_offset() argument
94 rxe_mr_fill_pages_from_sgt(struct rxe_mr *mr, struct sg_table *sgt) rxe_mr_fill_pages_from_sgt() argument
129 rxe_mr_init_user(struct rxe_dev *rxe, u64 start, u64 length, u64 iova, int access, struct rxe_mr *mr) rxe_mr_init_user() argument
159 rxe_mr_alloc(struct rxe_mr *mr, int num_buf) rxe_mr_alloc() argument
188 rxe_mr_init_fast(int max_pages, struct rxe_mr *mr) rxe_mr_init_fast() argument
210 struct rxe_mr *mr = to_rmr(ibmr); rxe_set_page() local
234 struct rxe_mr *mr = to_rmr(ibmr); rxe_map_mr_sg() local
245 rxe_mr_copy_xarray(struct rxe_mr *mr, u64 iova, void *addr, unsigned int length, enum rxe_mr_copy_dir dir) rxe_mr_copy_xarray() argument
277 rxe_mr_copy_dma(struct rxe_mr *mr, u64 dma_addr, void *addr, unsigned int length, enum rxe_mr_copy_dir dir) rxe_mr_copy_dma() argument
304 rxe_mr_copy(struct rxe_mr *mr, u64 iova, void *addr, unsigned int length, enum rxe_mr_copy_dir dir) rxe_mr_copy() argument
344 struct rxe_mr *mr = NULL; copy_data() local
424 rxe_flush_pmem_iova(struct rxe_mr *mr, u64 iova, unsigned int length) rxe_flush_pmem_iova() argument
471 rxe_mr_do_atomic_op(struct rxe_mr *mr, u64 iova, int opcode, u64 compare, u64 swap_add, u64 *orig_val) rxe_mr_do_atomic_op() argument
529 rxe_mr_do_atomic_write(struct rxe_mr *mr, u64 iova, u64 value) rxe_mr_do_atomic_write() argument
577 rxe_mr_do_atomic_write(struct rxe_mr *mr, u64 iova, u64 value) rxe_mr_do_atomic_write() argument
619 struct rxe_mr *mr; lookup_mr() local
641 struct rxe_mr *mr; rxe_invalidate_mr() local
690 struct rxe_mr *mr = to_rmr(wqe->wr.wr.reg.mr); rxe_reg_fast_mr() local
724 struct rxe_mr *mr = container_of(elem, typeof(*mr), elem); rxe_mr_cleanup() local
[all...]
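
The rxe_mr.c hits show mr_check_range() validating that an iova/length pair lies inside the registered region. Reconstructed from the fragments; the case labels and return values are inferred:

int mr_check_range(struct rxe_mr *mr, u64 iova, size_t length)
{
	switch (mr->ibmr.type) {
	case IB_MR_TYPE_DMA:
		return 0;	/* DMA MRs cover all of memory (inferred) */
	case IB_MR_TYPE_USER:
	case IB_MR_TYPE_MEM_REG:
		if (iova < mr->ibmr.iova ||
		    iova + length > mr->ibmr.iova + mr->ibmr.length) {
			rxe_dbg_mr(mr, "iova/length out of range");
			return -EINVAL;
		}
		return 0;
	default:
		rxe_dbg_mr(mr, "mr type not supported\n");
		return -EINVAL;
	}
}
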
rxe_mw.c
51 struct rxe_mw *mw, struct rxe_mr *mr, int access) in rxe_check_bind_mw()
83 if (unlikely(!mr || wqe->wr.wr.mw.length == 0)) { in rxe_check_bind_mw()
91 if (!mr) in rxe_check_bind_mw()
94 if (unlikely(mr->access & IB_ZERO_BASED)) { in rxe_check_bind_mw()
100 if (unlikely(!(mr->access & IB_ACCESS_MW_BIND))) { in rxe_check_bind_mw()
109 !(mr->access & IB_ACCESS_LOCAL_WRITE))) { in rxe_check_bind_mw()
117 if (unlikely(wqe->wr.wr.mw.length > mr->ibmr.length)) { in rxe_check_bind_mw()
123 if (unlikely((wqe->wr.wr.mw.addr < mr->ibmr.iova) || in rxe_check_bind_mw()
125 (mr->ibmr.iova + mr in rxe_check_bind_mw()
50 rxe_check_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe, struct rxe_mw *mw, struct rxe_mr *mr, int access) rxe_check_bind_mw() argument
135 rxe_do_bind_mw(struct rxe_qp *qp, struct rxe_send_wqe *wqe, struct rxe_mw *mw, struct rxe_mr *mr, int access) rxe_do_bind_mw() argument
168 struct rxe_mr *mr; rxe_bind_mw() local
239 struct rxe_mr *mr; rxe_do_invalidate_mw() local
320 struct rxe_mr *mr = mw->mr; rxe_mw_cleanup() local
[all...]
/kernel/linux/linux-6.6/net/sunrpc/xprtrdma/
frwr_ops.c
49 struct rpcrdma_mr *mr) in frwr_cid_init()
51 struct rpc_rdma_cid *cid = &mr->mr_cid; in frwr_cid_init()
54 cid->ci_completion_id = mr->mr_ibmr->res.id; in frwr_cid_init()
57 static void frwr_mr_unmap(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr) in frwr_mr_unmap() argument
59 if (mr->mr_device) { in frwr_mr_unmap()
60 trace_xprtrdma_mr_unmap(mr); in frwr_mr_unmap()
61 ib_dma_unmap_sg(mr->mr_device, mr->mr_sg, mr->mr_nents, in frwr_mr_unmap()
62 mr in frwr_mr_unmap()
48 frwr_cid_init(struct rpcrdma_ep *ep, struct rpcrdma_mr *mr) frwr_cid_init() argument
72 frwr_mr_release(struct rpcrdma_mr *mr) frwr_mr_release() argument
85 frwr_mr_put(struct rpcrdma_mr *mr) frwr_mr_put() argument
107 struct rpcrdma_mr *mr; frwr_reset() local
121 frwr_mr_init(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr) frwr_mr_init() argument
284 frwr_map(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr_seg *seg, int nsegs, bool writing, __be32 xid, struct rpcrdma_mr *mr) frwr_map() argument
361 struct rpcrdma_mr *mr = container_of(cqe, struct rpcrdma_mr, mr_cqe); frwr_wc_fastreg() local
387 struct rpcrdma_mr *mr; frwr_send() local
430 struct rpcrdma_mr *mr; frwr_reminv() local
441 frwr_mr_done(struct ib_wc *wc, struct rpcrdma_mr *mr) frwr_mr_done() argument
456 struct rpcrdma_mr *mr = container_of(cqe, struct rpcrdma_mr, mr_cqe); frwr_wc_localinv() local
475 struct rpcrdma_mr *mr = container_of(cqe, struct rpcrdma_mr, mr_cqe); frwr_wc_localinv_wake() local
501 struct rpcrdma_mr *mr; frwr_unmap_sync() local
572 struct rpcrdma_mr *mr = container_of(cqe, struct rpcrdma_mr, mr_cqe); frwr_wc_localinv_done() local
606 struct rpcrdma_mr *mr; frwr_unmap_async() local
673 struct rpcrdma_mr *mr; frwr_wp_create() local
[all...]
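
In the 6.6 frwr_ops.c hits, frwr_mr_unmap() DMA-unmaps an MR's scatterlist only when it is still mapped. A reconstruction; the truncated final argument and the device reset are inferred:

static void frwr_mr_unmap(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr)
{
	if (mr->mr_device) {
		trace_xprtrdma_mr_unmap(mr);
		ib_dma_unmap_sg(mr->mr_device, mr->mr_sg, mr->mr_nents,
				mr->mr_dir);	/* direction field inferred */
		mr->mr_device = NULL;		/* mark as unmapped (inferred) */
	}
}
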
/kernel/linux/linux-6.6/drivers/vdpa/mlx5/core/
mr.c
35 static void populate_mtts(struct mlx5_vdpa_direct_mr *mr, __be64 *mtt) in populate_mtts() argument
38 int nsg = mr->nsg; in populate_mtts()
44 for_each_sg(mr->sg_head.sgl, sg, mr->nent, i) { in populate_mtts()
47 nsg--, dma_addr += BIT(mr->log_size), dma_len -= BIT(mr->log_size)) in populate_mtts()
52 static int create_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) in create_direct_mr() argument
59 inlen = MLX5_ST_SZ_BYTES(create_mkey_in) + roundup(MLX5_ST_SZ_BYTES(mtt) * mr->nsg, 16); in create_direct_mr()
66 MLX5_SET(mkc, mkc, lw, !!(mr->perm & VHOST_MAP_WO)); in create_direct_mr()
67 MLX5_SET(mkc, mkc, lr, !!(mr in create_direct_mr()
89 destroy_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) destroy_direct_mr() argument
94 map_start(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) map_start() argument
99 map_end(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) map_end() argument
104 maplen(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) maplen() argument
182 create_indirect_key(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) create_indirect_key() argument
223 map_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr, struct vhost_iotlb *iotlb) map_direct_mr() argument
295 unmap_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) unmap_direct_mr() argument
307 struct mlx5_vdpa_mr *mr = &mvdev->mr; add_direct_chain() local
359 struct mlx5_vdpa_mr *mr = &mvdev->mr; create_user_mr() local
420 create_dma_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) create_dma_mr() argument
448 destroy_dma_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) destroy_dma_mr() argument
479 destroy_user_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) destroy_user_mr() argument
502 struct mlx5_vdpa_mr *mr = &mvdev->mr; _mlx5_vdpa_destroy_dvq_mr() local
520 struct mlx5_vdpa_mr *mr = &mvdev->mr; mlx5_vdpa_destroy_mr_asid() local
550 struct mlx5_vdpa_mr *mr = &mvdev->mr; _mlx5_vdpa_create_dvq_mr() local
607 struct mlx5_vdpa_mr *mr = &mvdev->mr; mlx5_vdpa_handle_set_map() local
[all...]
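
populate_mtts() in the vdpa hits walks the direct MR's scatterlist and emits one big-endian MTT entry per 2^log_size chunk of each DMA segment. Pieced together from the fragments; the local declarations and the inner-loop body are inferred:

static void populate_mtts(struct mlx5_vdpa_direct_mr *mr, __be64 *mtt)
{
	struct scatterlist *sg;
	int nsg = mr->nsg;
	u64 dma_addr, dma_len;
	int j = 0, i;

	for_each_sg(mr->sg_head.sgl, sg, mr->nent, i) {
		for (dma_addr = sg_dma_address(sg), dma_len = sg_dma_len(sg);
		     nsg && dma_len;
		     nsg--, dma_addr += BIT(mr->log_size),
		     dma_len -= BIT(mr->log_size))
			mtt[j++] = cpu_to_be64(dma_addr);	/* inferred */
	}
}
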
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx5/
mr.c
109 static void clean_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
110 static void dereg_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr);
119 static int destroy_mkey(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) in destroy_mkey() argument
121 WARN_ON(xa_load(&dev->odp_mkeys, mlx5_base_mkey(mr->mmkey.key))); in destroy_mkey()
123 return mlx5_core_destroy_mkey(dev->mdev, &mr->mmkey); in destroy_mkey()
126 static inline bool mlx5_ib_pas_fits_in_mr(struct mlx5_ib_mr *mr, u64 start, in mlx5_ib_pas_fits_in_mr() argument
129 return ((u64)1 << mr->order) * MLX5_ADAPTER_PAGE_SIZE >= in mlx5_ib_pas_fits_in_mr()
135 struct mlx5_ib_mr *mr = in create_mkey_callback() local
137 struct mlx5_ib_dev *dev = mr->dev; in create_mkey_callback()
138 struct mlx5_cache_ent *ent = mr in create_mkey_callback()
170 struct mlx5_ib_mr *mr; alloc_cache_mr() local
194 struct mlx5_ib_mr *mr; add_keys() local
242 struct mlx5_ib_mr *mr; create_cache_mr() local
278 struct mlx5_ib_mr *mr; remove_cache_mr_locked() local
567 struct mlx5_ib_mr *mr; mlx5_mr_cache_alloc() local
601 struct mlx5_ib_mr *mr = NULL; get_cache_mr() local
629 detach_mr_from_cache(struct mlx5_ib_mr *mr) detach_mr_from_cache() argument
639 mlx5_mr_cache_free(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) mlx5_mr_cache_free() argument
665 struct mlx5_ib_mr *mr; clean_keys() local
815 struct mlx5_ib_mr *mr; mlx5_ib_get_dma_mr() local
989 struct mlx5_ib_mr *mr; alloc_mr_from_cache() local
1020 mlx5_ib_update_xlt(struct mlx5_ib_mr *mr, u64 idx, int npages, int page_shift, int flags) mlx5_ib_update_xlt() argument
1176 struct mlx5_ib_mr *mr; reg_create() local
1254 set_mr_fields(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr, u64 length, int access_flags) set_mr_fields() argument
1268 struct mlx5_ib_mr *mr; mlx5_ib_get_dm_mr() local
1362 struct mlx5_ib_mr *mr = NULL; mlx5_ib_reg_user_mr() local
1477 mlx5_mr_cache_invalidate(struct mlx5_ib_mr *mr) mlx5_mr_cache_invalidate() argument
1494 rereg_umr(struct ib_pd *pd, struct mlx5_ib_mr *mr, int access_flags, int flags) rereg_umr() argument
1522 struct mlx5_ib_mr *mr = to_mmr(ib_mr); mlx5_ib_rereg_user_mr() local
1632 mlx5_alloc_priv_descs(struct ib_device *device, struct mlx5_ib_mr *mr, int ndescs, int desc_size) mlx5_alloc_priv_descs() argument
1664 mlx5_free_priv_descs(struct mlx5_ib_mr *mr) mlx5_free_priv_descs() argument
1677 clean_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) clean_mr() argument
1699 dereg_mr(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) dereg_mr() argument
1756 _mlx5_alloc_mkey_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, int desc_size, int page_shift, int access_mode, u32 *in, int inlen) _mlx5_alloc_mkey_descs() argument
1795 struct mlx5_ib_mr *mr; mlx5_ib_alloc_pi_mr() local
1832 mlx5_alloc_mem_reg_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, u32 *in, int inlen) mlx5_alloc_mem_reg_descs() argument
1840 mlx5_alloc_sg_gaps_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, u32 *in, int inlen) mlx5_alloc_sg_gaps_descs() argument
1847 mlx5_alloc_integrity_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int max_num_sg, int max_num_meta_sg, u32 *in, int inlen) mlx5_alloc_integrity_descs() argument
1932 struct mlx5_ib_mr *mr; __mlx5_ib_alloc_mr() local
2136 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_pa_mr_sg_pi() local
2165 mlx5_ib_sg_to_klms(struct mlx5_ib_mr *mr, struct scatterlist *sgl, unsigned short sg_nents, unsigned int *sg_offset_p, struct scatterlist *meta_sgl, unsigned short meta_sg_nents, unsigned int *meta_sg_offset_p) mlx5_ib_sg_to_klms() argument
2226 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_set_page() local
2240 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_set_page_pi() local
2259 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mtt_mr_sg_pi() local
2324 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_klm_mr_sg_pi() local
2357 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mr_sg_pi() local
2416 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mr_sg() local
[all...]
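
destroy_mkey() is fully visible in the 5.10 mlx5 hits: it asserts the mkey is no longer tracked in the ODP xarray before destroying it. Note that the 6.6 hits for the same function pass mr->mmkey.key by value instead of &mr->mmkey, reflecting a changed mlx5_core_destroy_mkey() signature.

static int destroy_mkey(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
{
	/* the mkey must already have been removed from the ODP xarray */
	WARN_ON(xa_load(&dev->odp_mkeys, mlx5_base_mkey(mr->mmkey.key)));

	return mlx5_core_destroy_mkey(dev->mdev, &mr->mmkey);
}
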
/kernel/linux/linux-5.10/drivers/vdpa/mlx5/core/
mr.c
34 static void populate_mtts(struct mlx5_vdpa_direct_mr *mr, __be64 *mtt) in populate_mtts() argument
37 int nsg = mr->nsg; in populate_mtts()
43 for_each_sg(mr->sg_head.sgl, sg, mr->nent, i) { in populate_mtts()
46 nsg--, dma_addr += BIT(mr->log_size), dma_len -= BIT(mr->log_size)) in populate_mtts()
51 static int create_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) in create_direct_mr() argument
58 inlen = MLX5_ST_SZ_BYTES(create_mkey_in) + roundup(MLX5_ST_SZ_BYTES(mtt) * mr->nsg, 16); in create_direct_mr()
65 MLX5_SET(mkc, mkc, lw, !!(mr->perm & VHOST_MAP_WO)); in create_direct_mr()
66 MLX5_SET(mkc, mkc, lr, !!(mr in create_direct_mr()
88 destroy_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) destroy_direct_mr() argument
93 map_start(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) map_start() argument
98 map_end(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) map_end() argument
103 maplen(struct vhost_iotlb_map *map, struct mlx5_vdpa_direct_mr *mr) maplen() argument
181 create_indirect_key(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_mr *mr) create_indirect_key() argument
222 map_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr, struct vhost_iotlb *iotlb) map_direct_mr() argument
294 unmap_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr) unmap_direct_mr() argument
306 struct mlx5_vdpa_mr *mr = &mvdev->mr; add_direct_chain() local
360 struct mlx5_vdpa_mr *mr = &mvdev->mr; _mlx5_vdpa_create_mr() local
426 struct mlx5_vdpa_mr *mr = &mvdev->mr; mlx5_vdpa_create_mr() local
437 struct mlx5_vdpa_mr *mr = &mvdev->mr; mlx5_vdpa_destroy_mr() local
459 struct mlx5_vdpa_mr *mr = &mvdev->mr; mlx5_vdpa_handle_set_map() local
[all...]
/kernel/linux/linux-5.10/net/sunrpc/xprtrdma/
frwr_ops.c
54 * @mr: MR allocated by frwr_mr_init
57 void frwr_release_mr(struct rpcrdma_mr *mr) in frwr_release_mr() argument
61 rc = ib_dereg_mr(mr->frwr.fr_mr); in frwr_release_mr()
63 trace_xprtrdma_frwr_dereg(mr, rc); in frwr_release_mr()
64 kfree(mr->mr_sg); in frwr_release_mr()
65 kfree(mr); in frwr_release_mr()
68 static void frwr_mr_recycle(struct rpcrdma_mr *mr) in frwr_mr_recycle() argument
70 struct rpcrdma_xprt *r_xprt = mr->mr_xprt; in frwr_mr_recycle()
72 trace_xprtrdma_mr_recycle(mr); in frwr_mr_recycle()
74 if (mr in frwr_mr_recycle()
101 struct rpcrdma_mr *mr; frwr_reset() local
115 frwr_mr_init(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr) frwr_mr_init() argument
281 frwr_map(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr_seg *seg, int nsegs, bool writing, __be32 xid, struct rpcrdma_mr *mr) frwr_map() argument
390 struct rpcrdma_mr *mr; frwr_send() local
419 struct rpcrdma_mr *mr; frwr_reminv() local
430 __frwr_release_mr(struct ib_wc *wc, struct rpcrdma_mr *mr) __frwr_release_mr() argument
449 struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr); frwr_wc_localinv() local
470 struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr); frwr_wc_localinv_wake() local
496 struct rpcrdma_mr *mr; frwr_unmap_sync() local
573 struct rpcrdma_mr *mr = container_of(frwr, struct rpcrdma_mr, frwr); frwr_wc_localinv_done() local
602 struct rpcrdma_mr *mr; frwr_unmap_async() local
[all...]
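
The 5.10 frwr hits show frwr_release_mr() nearly in full: deregister the underlying ib_mr, then free the scatterlist and the wrapper. Only the error check around the trace call is inferred:

void frwr_release_mr(struct rpcrdma_mr *mr)
{
	int rc;

	rc = ib_dereg_mr(mr->frwr.fr_mr);
	if (rc)				/* condition inferred */
		trace_xprtrdma_frwr_dereg(mr, rc);
	kfree(mr->mr_sg);
	kfree(mr);
}
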
/kernel/linux/linux-5.10/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_mr.c
61 struct pvrdma_user_mr *mr; in pvrdma_get_dma_mr() local
71 "unsupported dma mr access flags %#x\n", acc); in pvrdma_get_dma_mr()
75 mr = kzalloc(sizeof(*mr), GFP_KERNEL); in pvrdma_get_dma_mr()
76 if (!mr) in pvrdma_get_dma_mr()
89 kfree(mr); in pvrdma_get_dma_mr()
93 mr->mmr.mr_handle = resp->mr_handle; in pvrdma_get_dma_mr()
94 mr->ibmr.lkey = resp->lkey; in pvrdma_get_dma_mr()
95 mr->ibmr.rkey = resp->rkey; in pvrdma_get_dma_mr()
97 return &mr in pvrdma_get_dma_mr()
116 struct pvrdma_user_mr *mr = NULL; pvrdma_reg_user_mr() local
208 struct pvrdma_user_mr *mr; pvrdma_alloc_mr() local
279 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_dereg_mr() local
304 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_set_page() local
316 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_map_mr_sg() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/vmw_pvrdma/
pvrdma_mr.c
61 struct pvrdma_user_mr *mr; in pvrdma_get_dma_mr() local
71 "unsupported dma mr access flags %#x\n", acc); in pvrdma_get_dma_mr()
75 mr = kzalloc(sizeof(*mr), GFP_KERNEL); in pvrdma_get_dma_mr()
76 if (!mr) in pvrdma_get_dma_mr()
89 kfree(mr); in pvrdma_get_dma_mr()
93 mr->mmr.mr_handle = resp->mr_handle; in pvrdma_get_dma_mr()
94 mr->ibmr.lkey = resp->lkey; in pvrdma_get_dma_mr()
95 mr->ibmr.rkey = resp->rkey; in pvrdma_get_dma_mr()
97 return &mr in pvrdma_get_dma_mr()
116 struct pvrdma_user_mr *mr = NULL; pvrdma_reg_user_mr() local
208 struct pvrdma_user_mr *mr; pvrdma_alloc_mr() local
279 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_dereg_mr() local
304 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_set_page() local
316 struct pvrdma_user_mr *mr = to_vmr(ibmr); pvrdma_map_mr_sg() local
[all...]
/kernel/linux/linux-5.10/drivers/scsi/
mesh.c
305 volatile struct mesh_regs __iomem *mr = ms->mesh; in mesh_dump_regs() local
311 ms, mr, md); in mesh_dump_regs()
314 (mr->count_hi << 8) + mr->count_lo, mr->sequence, in mesh_dump_regs()
315 (mr->bus_status1 << 8) + mr->bus_status0, mr->fifo_count, in mesh_dump_regs()
316 mr->exception, mr in mesh_dump_regs()
339 mesh_flush_io(volatile struct mesh_regs __iomem *mr) mesh_flush_io() argument
360 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_init() local
407 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_start_cmd() local
641 volatile struct mesh_regs __iomem *mr = ms->mesh; set_sdtr() local
682 volatile struct mesh_regs __iomem *mr = ms->mesh; start_phase() local
826 volatile struct mesh_regs __iomem *mr = ms->mesh; get_msgin() local
858 volatile struct mesh_regs __iomem *mr = ms->mesh; reselected() local
991 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_reset() local
1034 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_error() local
1130 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_exception() local
1324 volatile struct mesh_regs __iomem *mr = ms->mesh; halt_dma() local
1369 volatile struct mesh_regs __iomem *mr = ms->mesh; phase_mismatch() local
1448 volatile struct mesh_regs __iomem *mr = ms->mesh; cmd_complete() local
1662 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_interrupt() local
1710 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_host_reset() local
1821 volatile struct mesh_regs __iomem *mr; mesh_shutdown() local
[all...]
/kernel/linux/linux-6.6/drivers/scsi/
mesh.c
305 volatile struct mesh_regs __iomem *mr = ms->mesh; in mesh_dump_regs() local
311 ms, mr, md); in mesh_dump_regs()
314 (mr->count_hi << 8) + mr->count_lo, mr->sequence, in mesh_dump_regs()
315 (mr->bus_status1 << 8) + mr->bus_status0, mr->fifo_count, in mesh_dump_regs()
316 mr->exception, mr in mesh_dump_regs()
339 mesh_flush_io(volatile struct mesh_regs __iomem *mr) mesh_flush_io() argument
351 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_init() local
398 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_start_cmd() local
635 volatile struct mesh_regs __iomem *mr = ms->mesh; set_sdtr() local
676 volatile struct mesh_regs __iomem *mr = ms->mesh; start_phase() local
820 volatile struct mesh_regs __iomem *mr = ms->mesh; get_msgin() local
852 volatile struct mesh_regs __iomem *mr = ms->mesh; reselected() local
985 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_reset() local
1028 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_error() local
1124 volatile struct mesh_regs __iomem *mr = ms->mesh; handle_exception() local
1318 volatile struct mesh_regs __iomem *mr = ms->mesh; halt_dma() local
1363 volatile struct mesh_regs __iomem *mr = ms->mesh; phase_mismatch() local
1442 volatile struct mesh_regs __iomem *mr = ms->mesh; cmd_complete() local
1657 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_interrupt() local
1705 volatile struct mesh_regs __iomem *mr = ms->mesh; mesh_host_reset() local
1816 volatile struct mesh_regs __iomem *mr; mesh_shutdown() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx5/
mr.c
126 static int destroy_mkey(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) in destroy_mkey() argument
128 WARN_ON(xa_load(&dev->odp_mkeys, mlx5_base_mkey(mr->mmkey.key))); in destroy_mkey()
130 return mlx5_core_destroy_mkey(dev->mdev, mr->mmkey.key); in destroy_mkey()
138 mlx5_ib_warn(dev, "async reg mr failed. status %d\n", status); in create_mkey_warn()
749 struct mlx5_ib_mr *mr; in _mlx5_mr_cache_alloc() local
752 mr = kzalloc(sizeof(*mr), GFP_KERNEL); in _mlx5_mr_cache_alloc()
753 if (!mr) in _mlx5_mr_cache_alloc()
763 err = create_cache_mkey(ent, &mr->mmkey.key); in _mlx5_mr_cache_alloc()
768 kfree(mr); in _mlx5_mr_cache_alloc()
1068 struct mlx5_ib_mr *mr; mlx5_ib_get_dma_mr() local
1129 set_mr_fields(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr, u64 length, int access_flags, u64 iova) set_mr_fields() argument
1160 struct mlx5_ib_mr *mr; alloc_cacheable_mr() local
1210 struct mlx5_ib_mr *mr; reg_create() local
1298 struct mlx5_ib_mr *mr; mlx5_ib_get_dm_mr() local
1392 struct mlx5_ib_mr *mr = NULL; create_real_mr() local
1437 struct mlx5_ib_mr *mr; create_user_odp_mr() local
1514 struct mlx5_ib_mr *mr = umem_dmabuf->private; mlx5_ib_dmabuf_invalidate_cb() local
1536 struct mlx5_ib_mr *mr = NULL; mlx5_ib_reg_user_mr_dmabuf() local
1603 can_use_umr_rereg_pas(struct mlx5_ib_mr *mr, struct ib_umem *new_umem, int new_access_flags, u64 iova, unsigned long *page_size) can_use_umr_rereg_pas() argument
1624 umr_rereg_pas(struct mlx5_ib_mr *mr, struct ib_pd *pd, int access_flags, int flags, struct ib_umem *new_umem, u64 iova, unsigned long page_size) umr_rereg_pas() argument
1677 struct mlx5_ib_mr *mr = to_mmr(ib_mr); mlx5_ib_rereg_user_mr() local
1768 mlx5_alloc_priv_descs(struct ib_device *device, struct mlx5_ib_mr *mr, int ndescs, int desc_size) mlx5_alloc_priv_descs() argument
1806 mlx5_free_priv_descs(struct mlx5_ib_mr *mr) mlx5_free_priv_descs() argument
1820 cache_ent_find_and_store(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr) cache_ent_find_and_store() argument
1865 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_dereg_mr() local
1954 _mlx5_alloc_mkey_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, int desc_size, int page_shift, int access_mode, u32 *in, int inlen) _mlx5_alloc_mkey_descs() argument
1993 struct mlx5_ib_mr *mr; mlx5_ib_alloc_pi_mr() local
2030 mlx5_alloc_mem_reg_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, u32 *in, int inlen) mlx5_alloc_mem_reg_descs() argument
2038 mlx5_alloc_sg_gaps_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int ndescs, u32 *in, int inlen) mlx5_alloc_sg_gaps_descs() argument
2045 mlx5_alloc_integrity_descs(struct ib_pd *pd, struct mlx5_ib_mr *mr, int max_num_sg, int max_num_meta_sg, u32 *in, int inlen) mlx5_alloc_integrity_descs() argument
2130 struct mlx5_ib_mr *mr; __mlx5_ib_alloc_mr() local
2329 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_pa_mr_sg_pi() local
2358 mlx5_ib_sg_to_klms(struct mlx5_ib_mr *mr, struct scatterlist *sgl, unsigned short sg_nents, unsigned int *sg_offset_p, struct scatterlist *meta_sgl, unsigned short meta_sg_nents, unsigned int *meta_sg_offset_p) mlx5_ib_sg_to_klms() argument
2419 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_set_page() local
2433 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_set_page_pi() local
2452 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mtt_mr_sg_pi() local
2517 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_klm_mr_sg_pi() local
2550 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mr_sg_pi() local
2609 struct mlx5_ib_mr *mr = to_mmr(ibmr); mlx5_ib_map_mr_sg() local
[all...]
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
gddr5.c
75 ram->mr[0] &= ~0xf7f; in nvkm_gddr5_calc()
76 ram->mr[0] |= (WR & 0x0f) << 8; in nvkm_gddr5_calc()
77 ram->mr[0] |= (CL & 0x0f) << 3; in nvkm_gddr5_calc()
78 ram->mr[0] |= (WL & 0x07) << 0; in nvkm_gddr5_calc()
80 ram->mr[1] &= ~0x0bf; in nvkm_gddr5_calc()
81 ram->mr[1] |= (xd & 0x01) << 7; in nvkm_gddr5_calc()
82 ram->mr[1] |= (at[0] & 0x03) << 4; in nvkm_gddr5_calc()
83 ram->mr[1] |= (dt & 0x03) << 2; in nvkm_gddr5_calc()
84 ram->mr[1] |= (ds & 0x03) << 0; in nvkm_gddr5_calc()
89 ram->mr1_nuts = ram->mr[ in nvkm_gddr5_calc()
[all...]
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/fb/
gddr5.c
75 ram->mr[0] &= ~0xf7f; in nvkm_gddr5_calc()
76 ram->mr[0] |= (WR & 0x0f) << 8; in nvkm_gddr5_calc()
77 ram->mr[0] |= (CL & 0x0f) << 3; in nvkm_gddr5_calc()
78 ram->mr[0] |= (WL & 0x07) << 0; in nvkm_gddr5_calc()
80 ram->mr[1] &= ~0x0bf; in nvkm_gddr5_calc()
81 ram->mr[1] |= (xd & 0x01) << 7; in nvkm_gddr5_calc()
82 ram->mr[1] |= (at[0] & 0x03) << 4; in nvkm_gddr5_calc()
83 ram->mr[1] |= (dt & 0x03) << 2; in nvkm_gddr5_calc()
84 ram->mr[1] |= (ds & 0x03) << 0; in nvkm_gddr5_calc()
89 ram->mr1_nuts = ram->mr[ in nvkm_gddr5_calc()
[all...]
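
The gddr5.c hits (identical in both versions) use the classic clear-then-insert pattern for mode-register fields: mask off a field's bits, then OR in the new value at its offset. The matched lines, annotated; the field-name expansions are interpretive, but note that 0xf7f covers exactly bits 11:8, 6:3 and 2:0 (leaving bit 7 alone), and 0x0bf exactly the xd/at/dt/ds fields:

ram->mr[0] &= ~0xf7f;			/* clear WR, CL, WL fields */
ram->mr[0] |= (WR & 0x0f) << 8;		/* write recovery, bits 11:8 */
ram->mr[0] |= (CL & 0x0f) << 3;		/* CAS latency, bits 6:3 */
ram->mr[0] |= (WL & 0x07) << 0;		/* write latency, bits 2:0 */

ram->mr[1] &= ~0x0bf;			/* clear drive/termination fields */
ram->mr[1] |= (xd & 0x01) << 7;
ram->mr[1] |= (at[0] & 0x03) << 4;
ram->mr[1] |= (dt & 0x03) << 2;
ram->mr[1] |= (ds & 0x03) << 0;
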
/kernel/linux/linux-5.10/drivers/infiniband/hw/mlx4/
mr.c
60 struct mlx4_ib_mr *mr; in mlx4_ib_get_dma_mr() local
63 mr = kzalloc(sizeof(*mr), GFP_KERNEL); in mlx4_ib_get_dma_mr()
64 if (!mr) in mlx4_ib_get_dma_mr()
68 ~0ull, convert_access(acc), 0, 0, &mr->mmr); in mlx4_ib_get_dma_mr()
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
76 mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key; in mlx4_ib_get_dma_mr()
77 mr in mlx4_ib_get_dma_mr()
411 struct mlx4_ib_mr *mr; mlx4_ib_reg_user_mr() local
458 mlx4_ib_rereg_user_mr(struct ib_mr *mr, int flags, u64 start, u64 length, u64 virt_addr, int mr_access_flags, struct ib_pd *pd, struct ib_udata *udata) mlx4_ib_rereg_user_mr() argument
549 mlx4_alloc_priv_pages(struct ib_device *device, struct mlx4_ib_mr *mr, int max_pages) mlx4_alloc_priv_pages() argument
584 mlx4_free_priv_pages(struct mlx4_ib_mr *mr) mlx4_free_priv_pages() argument
598 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_ib_dereg_mr() local
648 struct mlx4_ib_mr *mr; mlx4_ib_alloc_mr() local
690 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_set_page() local
703 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_ib_map_mr_sg() local
[all...]
/kernel/linux/linux-6.6/drivers/infiniband/hw/mlx4/
mr.c
60 struct mlx4_ib_mr *mr; in mlx4_ib_get_dma_mr() local
63 mr = kzalloc(sizeof(*mr), GFP_KERNEL); in mlx4_ib_get_dma_mr()
64 if (!mr) in mlx4_ib_get_dma_mr()
68 ~0ull, convert_access(acc), 0, 0, &mr->mmr); in mlx4_ib_get_dma_mr()
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
76 mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key; in mlx4_ib_get_dma_mr()
77 mr in mlx4_ib_get_dma_mr()
411 struct mlx4_ib_mr *mr; mlx4_ib_reg_user_mr() local
458 mlx4_ib_rereg_user_mr(struct ib_mr *mr, int flags, u64 start, u64 length, u64 virt_addr, int mr_access_flags, struct ib_pd *pd, struct ib_udata *udata) mlx4_ib_rereg_user_mr() argument
549 mlx4_alloc_priv_pages(struct ib_device *device, struct mlx4_ib_mr *mr, int max_pages) mlx4_alloc_priv_pages() argument
584 mlx4_free_priv_pages(struct mlx4_ib_mr *mr) mlx4_free_priv_pages() argument
598 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_ib_dereg_mr() local
648 struct mlx4_ib_mr *mr; mlx4_ib_alloc_mr() local
690 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_set_page() local
703 struct mlx4_ib_mr *mr = to_mmr(ibmr); mlx4_ib_map_mr_sg() local
[all...]
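
The mlx4 hits (same in 5.10 and 6.6) outline mlx4_ib_get_dma_mr(): allocate the wrapper, create and enable a device MR covering the whole address space (~0ull), and mirror the hardware key into lkey/rkey. A sketch; the leading arguments of the allocation call and the unwind path are inferred:

struct ib_mr *mlx4_ib_get_dma_mr(struct ib_pd *pd, int acc)
{
	struct mlx4_ib_mr *mr;
	int err;

	mr = kzalloc(sizeof(*mr), GFP_KERNEL);
	if (!mr)
		return ERR_PTR(-ENOMEM);

	/* first arguments of the alloc call are inferred */
	err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0,
			    ~0ull, convert_access(acc), 0, 0, &mr->mmr);
	if (err)
		goto err_free;

	err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr);
	if (err)
		goto err_mr;

	mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;

	return &mr->ibmr;

err_mr:
	(void)mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
err_free:
	kfree(mr);
	return ERR_PTR(err);
}
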
/kernel/linux/linux-6.6/drivers/infiniband/core/
uverbs_std_types_mr.c
95 struct ib_mr *mr; in UVERBS_METHOD_DM_MR_REG() local
127 mr = pd->device->ops.reg_dm_mr(pd, dm, &attr, attrs); in UVERBS_METHOD_DM_MR_REG()
128 if (IS_ERR(mr)) in UVERBS_METHOD_DM_MR_REG()
129 return PTR_ERR(mr); in UVERBS_METHOD_DM_MR_REG()
131 mr->device = pd->device; in UVERBS_METHOD_DM_MR_REG()
132 mr->pd = pd; in UVERBS_METHOD_DM_MR_REG()
133 mr->type = IB_MR_TYPE_DM; in UVERBS_METHOD_DM_MR_REG()
134 mr->dm = dm; in UVERBS_METHOD_DM_MR_REG()
135 mr->uobject = uobj; in UVERBS_METHOD_DM_MR_REG()
139 rdma_restrack_new(&mr in UVERBS_METHOD_DM_MR_REG()
159 struct ib_mr *mr = UVERBS_METHOD_QUERY_MR() local
197 struct ib_mr *mr; UVERBS_METHOD_REG_DMABUF_MR() local
[all...]
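
The uverbs hits show the pattern after a driver's reg_dm_mr() returns: the core, not the driver, fills in the generic ib_mr fields. The visible span, with comments (restrack and usage-count bookkeeping are elided in the hits):

	mr = pd->device->ops.reg_dm_mr(pd, dm, &attr, attrs);
	if (IS_ERR(mr))
		return PTR_ERR(mr);

	/* core-owned fields are set centrally, not by the driver */
	mr->device = pd->device;
	mr->pd = pd;
	mr->type = IB_MR_TYPE_DM;
	mr->dm = dm;
	mr->uobject = uobj;
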
mr_pool.c
10 struct ib_mr *mr; in ib_mr_pool_get() local
14 mr = list_first_entry_or_null(list, struct ib_mr, qp_entry); in ib_mr_pool_get()
15 if (mr) { in ib_mr_pool_get()
16 list_del(&mr->qp_entry); in ib_mr_pool_get()
21 return mr; in ib_mr_pool_get()
25 void ib_mr_pool_put(struct ib_qp *qp, struct list_head *list, struct ib_mr *mr) in ib_mr_pool_put() argument
30 list_add(&mr->qp_entry, list); in ib_mr_pool_put()
39 struct ib_mr *mr; in ib_mr_pool_init() local
45 mr = ib_alloc_mr_integrity(qp->pd, max_num_sg, in ib_mr_pool_init()
48 mr in ib_mr_pool_init()
68 struct ib_mr *mr; ib_mr_pool_destroy() local
[all...]
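
mr_pool.c (identical hits in both versions) keeps a per-QP free list of MRs. The fragments show ib_mr_pool_get() popping the head entry; the spinlock and the usage counter are inferred from context:

struct ib_mr *ib_mr_pool_get(struct ib_qp *qp, struct list_head *list)
{
	struct ib_mr *mr;
	unsigned long flags;

	spin_lock_irqsave(&qp->mr_lock, flags);		/* lock inferred */
	mr = list_first_entry_or_null(list, struct ib_mr, qp_entry);
	if (mr) {
		list_del(&mr->qp_entry);
		qp->mrs_used++;				/* counter inferred */
	}
	spin_unlock_irqrestore(&qp->mr_lock, flags);

	return mr;
}
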
/kernel/linux/linux-6.6/drivers/infiniband/hw/hns/
hns_roce_mr.c
51 static int alloc_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) in alloc_mr_key() argument
58 /* Allocate a key for mr from mr_table */ in alloc_mr_key()
66 mr->key = hw_index_to_key(id); /* MR key */ in alloc_mr_key()
81 static void free_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) in free_mr_key() argument
83 unsigned long obj = key_to_hw_index(mr->key); in free_mr_key()
89 static int alloc_mr_pbl(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr, in alloc_mr_pbl() argument
93 bool is_fast = mr->type == MR_TYPE_FRMR; in alloc_mr_pbl()
97 mr->pbl_hop_num = is_fast ? 1 : hr_dev->caps.pbl_hop_num; in alloc_mr_pbl()
100 buf_attr.region[0].size = mr->size; in alloc_mr_pbl()
101 buf_attr.region[0].hopnum = mr in alloc_mr_pbl()
118 free_mr_pbl(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) free_mr_pbl() argument
123 hns_roce_mr_free(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) hns_roce_mr_free() argument
141 hns_roce_mr_enable(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) hns_roce_mr_enable() argument
190 struct hns_roce_mr *mr; hns_roce_get_dma_mr() local
227 struct hns_roce_mr *mr; hns_roce_reg_user_mr() local
272 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_rereg_user_mr() local
342 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_dereg_mr() local
358 struct hns_roce_mr *mr; hns_roce_alloc_mr() local
407 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_set_page() local
422 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_map_mr_sg() local
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/core/
mr_pool.c
10 struct ib_mr *mr; in ib_mr_pool_get() local
14 mr = list_first_entry_or_null(list, struct ib_mr, qp_entry); in ib_mr_pool_get()
15 if (mr) { in ib_mr_pool_get()
16 list_del(&mr->qp_entry); in ib_mr_pool_get()
21 return mr; in ib_mr_pool_get()
25 void ib_mr_pool_put(struct ib_qp *qp, struct list_head *list, struct ib_mr *mr) in ib_mr_pool_put() argument
30 list_add(&mr->qp_entry, list); in ib_mr_pool_put()
39 struct ib_mr *mr; in ib_mr_pool_init() local
45 mr = ib_alloc_mr_integrity(qp->pd, max_num_sg, in ib_mr_pool_init()
48 mr in ib_mr_pool_init()
68 struct ib_mr *mr; ib_mr_pool_destroy() local
[all...]
/kernel/linux/linux-5.10/drivers/infiniband/hw/hns/
hns_roce_mr.c
69 static int alloc_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr, in alloc_mr_key() argument
76 /* Allocate a key for mr from mr_table */ in alloc_mr_key()
85 mr->iova = iova; /* MR va starting addr */ in alloc_mr_key()
86 mr->size = size; /* MR addr range */ in alloc_mr_key()
87 mr->pd = pd; /* MR num */ in alloc_mr_key()
88 mr->access = access; /* MR access permit */ in alloc_mr_key()
89 mr->enabled = 0; /* MR active status */ in alloc_mr_key()
90 mr->key = hw_index_to_key(obj); /* MR key */ in alloc_mr_key()
104 static void free_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) in free_mr_key() argument
106 unsigned long obj = key_to_hw_index(mr in free_mr_key()
112 alloc_mr_pbl(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr, size_t length, struct ib_udata *udata, u64 start, int access) alloc_mr_pbl() argument
143 free_mr_pbl(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) free_mr_pbl() argument
148 hns_roce_mr_free(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) hns_roce_mr_free() argument
167 hns_roce_mr_enable(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr) hns_roce_mr_enable() argument
232 struct hns_roce_mr *mr; hns_roce_get_dma_mr() local
267 struct hns_roce_mr *mr; hns_roce_reg_user_mr() local
309 struct hns_roce_mr *mr = to_hr_mr(ibmr); rereg_mr_trans() local
336 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_rereg_user_mr() local
403 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_dereg_mr() local
421 struct hns_roce_mr *mr; hns_roce_alloc_mr() local
470 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_set_page() local
485 struct hns_roce_mr *mr = to_hr_mr(ibmr); hns_roce_map_mr_sg() local
[all...]

Completed in 17 milliseconds
