/kernel/linux/linux-6.6/arch/powerpc/mm/book3s64/
iommu_api.c
    60: struct mm_iommu_table_group_mem_t *mem, *mem2;  (mm_iommu_do_alloc, local)
    73: mem = kzalloc(sizeof(*mem), GFP_KERNEL);  (mm_iommu_do_alloc)
    74: if (!mem) {  (mm_iommu_do_alloc)
    80: mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));  (mm_iommu_do_alloc)
    81: mem->dev_hpa = dev_hpa;  (mm_iommu_do_alloc)
    84: mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA;  (mm_iommu_do_alloc)
    91: mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));  (mm_iommu_do_alloc)
    92: mem->hpas = vzalloc(array_size(entries, sizeof(mem…  (mm_iommu_do_alloc)
    Further references: mem is an argument of mm_iommu_unpin (202), mm_iommu_do_free (227), mm_iommu_release (243), mm_iommu_put (249), mm_iommu_ua_to_hpa (330), mm_iommu_mapped_inc (383), mm_iommu_mapped_dec (393); a local in mm_iommu_free (237), mm_iommu_lookup (291), mm_iommu_get (311), mm_iommu_is_devmem (357). (listing truncated)
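The __ffs(ua | (entries << PAGE_SHIFT)) hit at line 91 caps the registered region's page shift at the largest power-of-two alignment shared by the userspace address and the region size. A minimal userspace sketch of the same idea, using GCC's __builtin_ctzl in place of the kernel's __ffs and hypothetical example values:

	#include <stdio.h>

	/* Stand-in for the kernel's __ffs(): index of the lowest set bit. */
	static unsigned long lowest_set_bit(unsigned long x)
	{
		return (unsigned long)__builtin_ctzl(x);
	}

	int main(void)
	{
		unsigned long page_shift = 12;                /* 4 KiB base pages assumed */
		unsigned long ua = 0x7f0000010000UL;          /* hypothetical user address */
		unsigned long entries = 16;                   /* 16 pages => 64 KiB region */

		/* Largest page shift both the start address and the size are aligned to. */
		unsigned long pageshift = lowest_set_bit(ua | (entries << page_shift));

		printf("region can be mapped with up to 2^%lu-byte pages (%lu KiB)\n",
		       pageshift, (1UL << pageshift) >> 10);
		return 0;
	}

With these example values both terms are aligned to bit 16, so the region could be backed by 64 KiB pages.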
/kernel/linux/linux-5.10/drivers/infiniband/sw/rxe/
rxe_mr.c
    27: int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length)  (mem_check_range, argument)
    29: switch (mem->type) {  (mem_check_range)
    35: if (iova < mem->iova ||  (mem_check_range)
    36:     length > mem->length ||  (mem_check_range)
    37:     iova > mem->iova + mem->length - length)  (mem_check_range)
    50: static void rxe_mem_init(int access, struct rxe_mem *mem)  (rxe_mem_init, argument)
    52: u32 lkey = mem->pelem.index << 8 | rxe_get_key();  (rxe_mem_init)
    55: mem->ibmr.lkey = lkey;  (rxe_mem_init)
    56: mem…  (rxe_mem_init)
    64: struct rxe_mem *mem = container_of(arg, typeof(*mem), pelem);  (rxe_mem_cleanup, local)
    Further references: mem is an argument of rxe_mem_alloc (77), rxe_mem_init_dma (115), rxe_mem_init_user (126), rxe_mem_init_fast (205), lookup_iova (230), iova_to_vaddr (270), rxe_mem_copy (311); a local in copy_data (415), lookup_mem (538). (listing truncated)
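The excerpt at lines 35-37 is the usual overflow-safe bounds check for an access against a registered memory region: it must start inside the region, must not be longer than the region, and must not run past its end. A small self-contained sketch of the same shape, with invented struct and function names:

	#include <stdbool.h>
	#include <stddef.h>
	#include <stdint.h>

	/* Simplified stand-in for a registered MR: just its address range. */
	struct mr_range {
		uint64_t iova;    /* start of the registered region */
		size_t   length;  /* size of the registered region  */
	};

	/* Same shape as the check excerpted from mem_check_range(). */
	static bool range_ok(const struct mr_range *mr, uint64_t iova, size_t length)
	{
		if (iova < mr->iova ||
		    length > mr->length ||
		    iova > mr->iova + mr->length - length)
			return false;
		return true;
	}

Comparing iova against mr->iova + mr->length - length (after ruling out length > mr->length) avoids computing iova + length, which could wrap around.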
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
mem.c
    23: #include "mem.h"
    35: struct page **mem;  (struct member)
    57: struct nvkm_mem *mem = nvkm_mem(memory);  (nvkm_mem_addr, local)
    58: if (mem->pages == 1 && mem->mem)  (nvkm_mem_addr)
    59: return mem->dma[0];  (nvkm_mem_addr)
    73: struct nvkm_mem *mem = nvkm_mem(memory);  (nvkm_mem_map_dma, local)
    75: .memory = &mem->memory,  (nvkm_mem_map_dma)
    77: .dma = mem…  (nvkm_mem_map_dma)
    Further references: mem is a local in nvkm_mem_dtor (85), nvkm_mem_map_sgl (113), nvkm_mem_map_host (135), nvkm_mem_new_host (154). (listing truncated)
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
mem.c
    23: #include "mem.h"
    35: struct page **mem;  (struct member)
    57: struct nvkm_mem *mem = nvkm_mem(memory);  (nvkm_mem_addr, local)
    58: if (mem->pages == 1 && mem->mem)  (nvkm_mem_addr)
    59: return mem->dma[0];  (nvkm_mem_addr)
    73: struct nvkm_mem *mem = nvkm_mem(memory);  (nvkm_mem_map_dma, local)
    75: .memory = &mem->memory,  (nvkm_mem_map_dma)
    77: .dma = mem…  (nvkm_mem_map_dma)
    Further references: mem is a local in nvkm_mem_dtor (85), nvkm_mem_map_sgl (113), nvkm_mem_map_host (135), nvkm_mem_new_host (154). (listing truncated)
/kernel/linux/linux-5.10/drivers/gpu/drm/i915/ |
intel_region_lmem.c
    12: static int init_fake_lmem_bar(struct intel_memory_region *mem)  (init_fake_lmem_bar, argument)
    14: struct drm_i915_private *i915 = mem->i915;  (init_fake_lmem_bar)
    21: mem->fake_mappable.start = 0;  (init_fake_lmem_bar)
    22: mem->fake_mappable.size = resource_size(&mem->region);  (init_fake_lmem_bar)
    23: mem->fake_mappable.color = I915_COLOR_UNEVICTABLE;  (init_fake_lmem_bar)
    25: ret = drm_mm_reserve_node(&ggtt->vm.mm, &mem->fake_mappable);  (init_fake_lmem_bar)
    29: mem->remap_addr = dma_map_resource(&i915->drm.pdev->dev,  (init_fake_lmem_bar)
    30:     mem->region.start,  (init_fake_lmem_bar)
    31:     mem…  (init_fake_lmem_bar)
    Further references: mem is an argument of release_fake_lmem_bar (52), region_lmem_release (67), region_lmem_init (75); a local in intel_setup_fake_lmem (108). (listing truncated)
intel_memory_region.c
    34: intel_memory_region_free_pages(struct intel_memory_region *mem, …  (intel_memory_region_free_pages, argument)
    41: size += i915_buddy_block_size(&mem->mm, block);  (intel_memory_region_free_pages)
    42: i915_buddy_free(&mem->mm, block);  (intel_memory_region_free_pages)
    50: __intel_memory_region_put_pages_buddy(struct intel_memory_region *mem, …  (argument)
    53: mutex_lock(&mem->mm_lock);  (__intel_memory_region_put_pages_buddy)
    54: mem->avail += intel_memory_region_free_pages(mem, blocks);  (__intel_memory_region_put_pages_buddy)
    55: mutex_unlock(&mem->mm_lock);  (__intel_memory_region_put_pages_buddy)
    69: __intel_memory_region_get_pages_buddy(struct intel_memory_region *mem, …  (argument)
    77: GEM_BUG_ON(!IS_ALIGNED(size, mem…  (__intel_memory_region_get_pages_buddy)
    Further references: mem is an argument of __intel_memory_region_get_block_buddy (134), intel_memory_region_init_buddy (151), intel_memory_region_release_buddy (157), intel_memory_region_set_name (205), intel_memory_region_get (229), intel_memory_region_put (235); a local in intel_memory_region_create (170), __intel_memory_region_destroy (217), intel_memory_regions_hw_probe (247). (listing truncated)
/kernel/linux/linux-5.10/arch/powerpc/mm/book3s64/ |
iommu_api.c
    60: struct mm_iommu_table_group_mem_t *mem, *mem2;  (mm_iommu_do_alloc, local)
    73: mem = kzalloc(sizeof(*mem), GFP_KERNEL);  (mm_iommu_do_alloc)
    74: if (!mem) {  (mm_iommu_do_alloc)
    80: mem->pageshift = __ffs(dev_hpa | (entries << PAGE_SHIFT));  (mm_iommu_do_alloc)
    81: mem->dev_hpa = dev_hpa;  (mm_iommu_do_alloc)
    84: mem->dev_hpa = MM_IOMMU_TABLE_INVALID_HPA;  (mm_iommu_do_alloc)
    91: mem->pageshift = __ffs(ua | (entries << PAGE_SHIFT));  (mm_iommu_do_alloc)
    92: mem->hpas = vzalloc(array_size(entries, sizeof(mem…  (mm_iommu_do_alloc)
    Further references: mem is an argument of mm_iommu_unpin (201), mm_iommu_do_free (226), mm_iommu_release (242), mm_iommu_put (248), mm_iommu_ua_to_hpa (344), mm_iommu_ua_to_hpa_rm (368), mm_iommu_mapped_inc (445), mm_iommu_mapped_dec (455); a local in mm_iommu_free (236), mm_iommu_lookup (290), mm_iommu_lookup_rm (308), mm_iommu_get (326), mm_iommu_ua_mark_dirty_rm (396), mm_iommu_is_devmem (421). (listing truncated)
/kernel/linux/linux-5.10/drivers/staging/media/atomisp/pci/runtime/isp_param/src/ |
isp_param.c
    27: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_mem_init)
    30: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_mem_init)
    31: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_mem_init)
    38: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_css_mem_init)
    41: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_css_mem_init)
    42: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_css_mem_init)
    49: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_isp_mem_init)
    52: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_isp_mem_init)
    53: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_isp_mem_init)
    61: enum ia_css_isp_memories mem)  (ia_css_isp_param_get_mem_init)
    Further references: mem is an argument of ia_css_isp_param_set_mem_init (24), ia_css_isp_param_set_css_mem_init (35), ia_css_isp_param_set_isp_mem_init (46), ia_css_isp_param_get_mem_init (58), ia_css_isp_param_get_css_mem_init (67), ia_css_isp_param_get_isp_mem_init (76); a local in ia_css_init_memory_interface (90), ia_css_isp_param_allocate_isp_parameters (111), ia_css_isp_param_destroy_isp_parameters (154), ia_css_isp_param_copy_isp_mem_if_to_ddr (189). (listing truncated)
/kernel/linux/linux-5.10/drivers/gpu/drm/nouveau/ |
nouveau_mem.c
    36: nouveau_mem_map(struct nouveau_mem *mem,  (nouveau_mem_map, argument)
    54: args.nv50.kind = mem->kind;  (nouveau_mem_map)
    55: args.nv50.comp = mem->comp;  (nouveau_mem_map)
    62: if (mem->mem.type & NVIF_MEM_VRAM)  (nouveau_mem_map)
    68: args.gf100.kind = mem->kind;  (nouveau_mem_map)
    78: ret = nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc,  (nouveau_mem_map)
    79:     &mem->mem,  (nouveau_mem_map)
    Further references: mem is an argument of nouveau_mem_fini (85); a local in nouveau_mem_host (97), nouveau_mem_vram (135), nouveau_mem_del (178), nouveau_mem_new (190). (listing truncated)
/kernel/linux/linux-6.6/drivers/staging/media/atomisp/pci/runtime/isp_param/src/ |
isp_param.c
    27: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_mem_init)
    30: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_mem_init)
    31: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_mem_init)
    38: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_css_mem_init)
    41: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_css_mem_init)
    42: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_css_mem_init)
    49: enum ia_css_isp_memories mem,  (ia_css_isp_param_set_isp_mem_init)
    52: mem_init->params[pclass][mem].address = address;  (ia_css_isp_param_set_isp_mem_init)
    53: mem_init->params[pclass][mem].size = (uint32_t)size;  (ia_css_isp_param_set_isp_mem_init)
    61: enum ia_css_isp_memories mem)  (ia_css_isp_param_get_mem_init)
    Further references: mem is an argument of ia_css_isp_param_set_mem_init (24), ia_css_isp_param_set_css_mem_init (35), ia_css_isp_param_set_isp_mem_init (46), ia_css_isp_param_get_mem_init (58), ia_css_isp_param_get_css_mem_init (67), ia_css_isp_param_get_isp_mem_init (76); a local in ia_css_init_memory_interface (90), ia_css_isp_param_allocate_isp_parameters (111), ia_css_isp_param_destroy_isp_parameters (154), ia_css_isp_param_copy_isp_mem_if_to_ddr (188). (listing truncated)
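The repeated params[pclass][mem].address/.size assignments are writes into a two-dimensional descriptor table indexed by parameter class and ISP memory. A tiny hedged sketch of the same shape, with invented table dimensions and names:

	#include <stddef.h>
	#include <stdint.h>

	#define N_PARAM_CLASSES 3   /* assumption: number of parameter classes  */
	#define N_ISP_MEMORIES  8   /* assumption: number of ISP memory targets */

	struct mem_desc {
		uint32_t address;
		uint32_t size;
	};

	struct mem_init_table {
		struct mem_desc params[N_PARAM_CLASSES][N_ISP_MEMORIES];
	};

	/* Record where one parameter segment lives, mirroring the setters above. */
	static void set_mem_init(struct mem_init_table *t, unsigned int pclass,
				 unsigned int mem, uint32_t address, size_t size)
	{
		t->params[pclass][mem].address = address;
		t->params[pclass][mem].size = (uint32_t)size;
	}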
/kernel/linux/linux-6.6/drivers/gpu/drm/nouveau/ |
nouveau_mem.c
    37: nouveau_mem_map(struct nouveau_mem *mem,  (nouveau_mem_map, argument)
    53: args.nv50.kind = mem->kind;  (nouveau_mem_map)
    54: args.nv50.comp = mem->comp;  (nouveau_mem_map)
    61: if (mem->mem.type & NVIF_MEM_VRAM)  (nouveau_mem_map)
    67: args.gf100.kind = mem->kind;  (nouveau_mem_map)
    75: return nvif_vmm_map(vmm, vma->addr, mem->mem.size, &args, argc, &mem->mem,  (nouveau_mem_map)
    Further references: mem is an argument of nouveau_mem_fini (79); a local in nouveau_mem_host (91), nouveau_mem_vram (128), nouveau_mem_del (168), nouveau_mem_new (179). (listing truncated)
/kernel/linux/linux-6.6/drivers/gpu/drm/i915/ |
intel_memory_region.c
    36: static int __iopagetest(struct intel_memory_region *mem,  (__iopagetest, argument)
    51: dev_err(mem->i915->drm.dev,  (__iopagetest)
    53:     &mem->region, &mem->io_start, &offset, caller,  (__iopagetest)
    61: static int iopagetest(struct intel_memory_region *mem,  (iopagetest, argument)
    70: va = ioremap_wc(mem->io_start + offset, PAGE_SIZE);  (iopagetest)
    72: dev_err(mem->i915->drm.dev,  (iopagetest)
    74:     &mem->io_start, &offset, caller);  (iopagetest)
    79: err = __iopagetest(mem, va, PAGE_SIZE, val[i], offset, caller);  (iopagetest)
    83: err = __iopagetest(mem, v…  (iopagetest)
    Further references: mem is an argument of iomemtest (98), intel_memory_region_reserve (182), intel_memory_region_memtest (204), intel_memory_region_set_name (270), intel_memory_region_destroy (294); a local in intel_memory_region_create (230), intel_memory_regions_hw_probe (314). (listing truncated)
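iopagetest() probes a page of the region's I/O aperture by mapping it write-combined, writing test patterns, and reading them back. A hedged kernel-style sketch of that probe loop only (this is not the i915 implementation; the function name, patterns, and error handling here are illustrative):

	#include <linux/io.h>
	#include <linux/kernel.h>
	#include <linux/errno.h>

	/* Write test patterns to one mapped I/O page and read them back. */
	static int probe_io_page(phys_addr_t io_start, resource_size_t offset)
	{
		static const u32 patterns[] = { 0x00000000, 0xffffffff, 0x5a5a5a5a };
		void __iomem *va;
		unsigned int i;
		int err = 0;

		va = ioremap_wc(io_start + offset, PAGE_SIZE);
		if (!va)
			return -EIO;

		for (i = 0; i < ARRAY_SIZE(patterns); i++) {
			writel(patterns[i], va);
			if (readl(va) != patterns[i]) {
				err = -EINVAL;	/* aperture did not hold the value */
				break;
			}
		}

		iounmap(va);
		return err;
	}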
intel_region_ttm.c
    54: int intel_region_to_ttm_type(const struct intel_memory_region *mem)  (intel_region_to_ttm_type, argument)
    58: GEM_BUG_ON(mem->type != INTEL_MEMORY_LOCAL &&  (intel_region_to_ttm_type)
    59:     mem->type != INTEL_MEMORY_MOCK &&  (intel_region_to_ttm_type)
    60:     mem->type != INTEL_MEMORY_SYSTEM);  (intel_region_to_ttm_type)
    62: if (mem->type == INTEL_MEMORY_SYSTEM)  (intel_region_to_ttm_type)
    65: type = mem->instance + TTM_PL_PRIV;  (intel_region_to_ttm_type)
    73: * @mem: The region to initialize.  (kernel-doc comment)
    82: int intel_region_ttm_init(struct intel_memory_region *mem)  (intel_region_ttm_init, argument)
    84: struct ttm_device *bdev = &mem->i915->bdev;  (intel_region_ttm_init)
    85: int mem_type = intel_region_to_ttm_type(mem);  (intel_region_ttm_init)
    Further references: mem is an argument of intel_region_ttm_fini (108), intel_region_ttm_resource_to_rsgt (163), intel_region_ttm_resource_alloc (198), intel_region_ttm_resource_free (255). (listing truncated)
/kernel/linux/linux-5.10/drivers/pci/endpoint/ |
pci-epc-mem.c
    17: * @mem: address space of the endpoint controller  (comment)
    20: * Reimplement get_order() for mem->page_size since the generic get_order  (comment)
    23: static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size)  (pci_epc_mem_get_order, argument)
    26: unsigned int page_shift = ilog2(mem->window.page_size);  (pci_epc_mem_get_order)
    51: struct pci_epc_mem *mem = NULL;  (pci_epc_multi_mem_init, local)
    77: mem = kzalloc(sizeof(*mem), GFP_KERNEL);  (pci_epc_multi_mem_init)
    78: if (!mem) {  (pci_epc_multi_mem_init)
    87: kfree(mem);  (pci_epc_multi_mem_init)
    92: mem…  (pci_epc_multi_mem_init)
    Further references: mem is a local in pci_epc_mem_exit (140), pci_epc_mem_alloc_addr (172), pci_epc_get_matching_window (211), pci_epc_mem_free_addr (237). (listing truncated)
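The comment at lines 17-20 explains why the driver reimplements get_order(): the allocation order has to be measured against the window's configurable page_size rather than the CPU PAGE_SIZE. A hedged sketch of that calculation (not the exact kernel helper, which works from ilog2 of the page size):

	#include <stdint.h>

	/*
	 * Number of doublings of 'page_size' needed to cover 'size',
	 * i.e. get_order() measured against a custom page size.
	 */
	static unsigned int order_for_page_size(uint64_t size, uint64_t page_size)
	{
		unsigned int order = 0;
		uint64_t block = page_size;

		while (block < size) {
			block <<= 1;
			order++;
		}
		return order;
	}

	/* Example: a 96 KiB allocation from a window with 64 KiB pages needs order 1
	 * (a 128 KiB block), whereas the generic get_order() would answer in 4 KiB units. */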
/kernel/linux/linux-6.6/drivers/pci/endpoint/ |
pci-epc-mem.c
    17: * @mem: address space of the endpoint controller  (comment)
    20: * Reimplement get_order() for mem->page_size since the generic get_order  (comment)
    23: static int pci_epc_mem_get_order(struct pci_epc_mem *mem, size_t size)  (pci_epc_mem_get_order, argument)
    26: unsigned int page_shift = ilog2(mem->window.page_size);  (pci_epc_mem_get_order)
    51: struct pci_epc_mem *mem = NULL;  (pci_epc_multi_mem_init, local)
    77: mem = kzalloc(sizeof(*mem), GFP_KERNEL);  (pci_epc_multi_mem_init)
    78: if (!mem) {  (pci_epc_multi_mem_init)
    87: kfree(mem);  (pci_epc_multi_mem_init)
    92: mem…  (pci_epc_multi_mem_init)
    Further references: mem is a local in pci_epc_mem_exit (150), pci_epc_mem_alloc_addr (182), pci_epc_get_matching_window (221), pci_epc_mem_free_addr (247). (listing truncated)
/kernel/linux/linux-5.10/drivers/media/v4l2-core/ |
videobuf-dma-contig.c
    38: struct videobuf_dma_contig_memory *mem,  (__videobuf_dc_alloc, argument)
    41: mem->size = size;  (__videobuf_dc_alloc)
    42: mem->vaddr = dma_alloc_coherent(dev, mem->size, &mem->dma_handle,  (__videobuf_dc_alloc)
    44: if (!mem->vaddr) {  (__videobuf_dc_alloc)
    45: dev_err(dev, "memory alloc size %ld failed\n", mem->size);  (__videobuf_dc_alloc)
    49: dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);  (__videobuf_dc_alloc)
    55: struct videobuf_dma_contig_memory *mem)  (__videobuf_dc_free, argument)
    Further references: mem is an argument of __videobuf_dc_alloc (37), __videobuf_dc_free (54), videobuf_dma_contig_user_put (140), videobuf_dma_contig_user_get (156); a local in videobuf_vm_close (83), __videobuf_alloc (210), __videobuf_to_vaddr (225), __videobuf_iolock (237), __videobuf_mmap_mapper (276), videobuf_to_dma_contig (358), videobuf_dma_contig_free (370). (listing truncated)
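__videobuf_dc_alloc() is a thin wrapper around dma_alloc_coherent(), which hands back both a kernel virtual address and the DMA handle for the same contiguous buffer. A minimal hedged kernel-style sketch of that allocate/free pairing (the struct and function names here are illustrative, not the videobuf ones):

	#include <linux/dma-mapping.h>
	#include <linux/device.h>

	struct dc_buf {
		void *vaddr;            /* CPU virtual address of the buffer */
		dma_addr_t dma_handle;  /* address the device uses for DMA   */
		size_t size;
	};

	static int dc_buf_alloc(struct device *dev, struct dc_buf *buf, size_t size)
	{
		buf->size = size;
		buf->vaddr = dma_alloc_coherent(dev, size, &buf->dma_handle, GFP_KERNEL);
		return buf->vaddr ? 0 : -ENOMEM;
	}

	static void dc_buf_free(struct device *dev, struct dc_buf *buf)
	{
		dma_free_coherent(dev, buf->size, buf->vaddr, buf->dma_handle);
		buf->vaddr = NULL;
	}

Because the mapping is coherent, CPU writes through vaddr and device accesses through dma_handle see the same data without explicit cache maintenance.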
videobuf-vmalloc.c
    72: struct videobuf_vmalloc_memory *mem;  (videobuf_vm_close, local)
    88: mem = q->bufs[i]->priv;  (videobuf_vm_close)
    89: if (mem) {  (videobuf_vm_close)
    96: MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);  (videobuf_vm_close)
    102: __func__, i, mem->vaddr);  (videobuf_vm_close)
    104: vfree(mem->vaddr);  (videobuf_vm_close)
    105: mem->vaddr = NULL;  (videobuf_vm_close)
    137: struct videobuf_vmalloc_memory *mem;  (__videobuf_alloc_vb, local)
    140: vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);  (__videobuf_alloc_vb)
    144: mem…  (__videobuf_alloc_vb)
    Further references: mem is a local in __videobuf_iolock (158), __videobuf_mmap_mapper (213), videobuf_to_vmalloc (294), videobuf_vmalloc_free (304). (listing truncated)
/kernel/linux/linux-6.6/drivers/media/v4l2-core/ |
videobuf-dma-contig.c
    38: struct videobuf_dma_contig_memory *mem,  (__videobuf_dc_alloc, argument)
    41: mem->size = size;  (__videobuf_dc_alloc)
    42: mem->vaddr = dma_alloc_coherent(dev, mem->size, &mem->dma_handle,  (__videobuf_dc_alloc)
    44: if (!mem->vaddr) {  (__videobuf_dc_alloc)
    45: dev_err(dev, "memory alloc size %ld failed\n", mem->size);  (__videobuf_dc_alloc)
    49: dev_dbg(dev, "dma mapped data is at %p (%ld)\n", mem->vaddr, mem->size);  (__videobuf_dc_alloc)
    55: struct videobuf_dma_contig_memory *mem)  (__videobuf_dc_free, argument)
    Further references: mem is an argument of __videobuf_dc_alloc (37), __videobuf_dc_free (54), videobuf_dma_contig_user_put (140), videobuf_dma_contig_user_get (156); a local in videobuf_vm_close (83), __videobuf_alloc (210), __videobuf_to_vaddr (225), __videobuf_iolock (237), __videobuf_mmap_mapper (276), videobuf_to_dma_contig (358), videobuf_dma_contig_free (370). (listing truncated)
videobuf-vmalloc.c
    72: struct videobuf_vmalloc_memory *mem;  (videobuf_vm_close, local)
    88: mem = q->bufs[i]->priv;  (videobuf_vm_close)
    89: if (mem) {  (videobuf_vm_close)
    96: MAGIC_CHECK(mem->magic, MAGIC_VMAL_MEM);  (videobuf_vm_close)
    102: __func__, i, mem->vaddr);  (videobuf_vm_close)
    104: vfree(mem->vaddr);  (videobuf_vm_close)
    105: mem->vaddr = NULL;  (videobuf_vm_close)
    137: struct videobuf_vmalloc_memory *mem;  (__videobuf_alloc_vb, local)
    140: vb = kzalloc(size + sizeof(*mem), GFP_KERNEL);  (__videobuf_alloc_vb)
    144: mem…  (__videobuf_alloc_vb)
    Further references: mem is a local in __videobuf_iolock (158), __videobuf_mmap_mapper (213), videobuf_to_vmalloc (294), videobuf_vmalloc_free (304). (listing truncated)
/kernel/linux/linux-6.6/kernel/dma/ |
coherent.c
    31: struct dma_coherent_mem * mem)  (dma_get_device_base, argument)
    33: if (mem->use_dev_dma_pfn_offset)  (dma_get_device_base)
    34: return phys_to_dma(dev, PFN_PHYS(mem->pfn_base));  (dma_get_device_base)
    35: return mem->device_base;  (dma_get_device_base)
    77: static void _dma_release_coherent_memory(struct dma_coherent_mem *mem)  (_dma_release_coherent_memory, argument)
    79: if (!mem)  (_dma_release_coherent_memory)
    82: memunmap(mem->virt_base);  (_dma_release_coherent_memory)
    83: bitmap_free(mem->bitmap);  (_dma_release_coherent_memory)
    84: kfree(mem);  (_dma_release_coherent_memory)
    88: struct dma_coherent_mem *mem)  (dma_assign_coherent_memory, argument)
    Further references: mem is an argument of dma_get_device_base (30), dma_assign_coherent_memory (87), __dma_alloc_from_coherent (141), __dma_release_from_coherent (199), __dma_mmap_from_coherent (234); a local in dma_declare_coherent_memory (120), dma_alloc_from_dev_coherent (190), dma_release_from_dev_coherent (229), dma_mmap_from_dev_coherent (274), dma_init_global_coherent (313), rmem_dma_device_init (339). (listing truncated)
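dma_get_device_base() returns the device-visible address of the coherent pool: either the recorded device_base, or, when a per-device PFN offset is in effect, the physical base translated through phys_to_dma(). A hedged sketch of just that translation step, with an invented pool struct whose field names mirror the excerpt:

	#include <linux/device.h>
	#include <linux/dma-direct.h>
	#include <linux/pfn.h>
	#include <linux/types.h>

	struct coherent_pool {
		dma_addr_t	device_base;		/* pre-translated device address  */
		unsigned long	pfn_base;		/* physical PFN of the pool start */
		bool		use_dev_dma_pfn_offset;	/* translate via the DMA offset?  */
	};

	/* Device-visible base address of the pool, as a device on 'dev' sees it. */
	static dma_addr_t pool_device_base(struct device *dev, struct coherent_pool *pool)
	{
		if (pool->use_dev_dma_pfn_offset)
			return phys_to_dma(dev, PFN_PHYS(pool->pfn_base));
		return pool->device_base;
	}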
swiotlb.c
    228: struct io_tlb_pool *mem = &io_tlb_default_mem.defpool;  (swiotlb_print_info, local)
    230: if (!mem->nslabs) {  (swiotlb_print_info)
    231: pr_warn("No low mem\n");  (swiotlb_print_info)
    235: pr_info("mapped [mem %pa-%pa] (%luMB)\n", &mem->start, &mem->end,  (swiotlb_print_info)
    236:     (mem->nslabs << IO_TLB_SHIFT) >> 20);  (swiotlb_print_info)
    257: struct io_tlb_pool *mem = &io_tlb_default_mem.defpool;  (swiotlb_update_mem_attributes, local)
    260: if (!mem->nslabs || mem…  (swiotlb_update_mem_attributes)
    Further references: mem is an argument of swiotlb_init_io_tlb_pool (266), add_mem_pool (302), wrap_area_index (918), inc_used_and_hiwater (932, 951), dec_used (945, 954), mem_used (1213, 1247), swiotlb_create_debugfs_files (1568, 1595); a local in swiotlb_init_remap (353), swiotlb_init_late (429), swiotlb_exit (523), swiotlb_dyn_alloc (732), swiotlb_find_pool (773), swiotlb_bounce (838), swiotlb_find_slots (1122), swiotlb_tbl_map_single (1271), swiotlb_release_slots (1324), is_swiotlb_active (1502), io_tlb_used_get (1538), io_tlb_hiwater_get (1546), io_tlb_hiwater_set (1554), swiotlb_alloc (1606), rmem_swiotlb_device_init (1638). (listing truncated)
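The hits above trace swiotlb's bounce-buffer bookkeeping: pool setup, slot search, and the bounce copy itself. As a purely conceptual, userspace illustration of bouncing (stage the caller's buffer in an area the device could reach, then copy back after the transfer), with all names invented and none of the real slot accounting:

	#include <stddef.h>
	#include <string.h>

	/* Toy staging pool standing in for the software I/O TLB. */
	static char bounce_pool[4096];

	/* "Map": stage the caller's data in the pool and hand out the pool address. */
	static void *bounce_map(const void *buf, size_t len)
	{
		if (len > sizeof(bounce_pool))
			return NULL;
		memcpy(bounce_pool, buf, len);	/* to-device direction */
		return bounce_pool;
	}

	/* "Unmap": copy the (possibly device-written) staged data back to the caller. */
	static void bounce_unmap(void *buf, size_t len)
	{
		memcpy(buf, bounce_pool, len);	/* from-device direction */
	}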
/kernel/linux/linux-5.10/kernel/dma/ |
coherent.c
    33: struct dma_coherent_mem * mem)  (dma_get_device_base, argument)
    35: if (mem->use_dev_dma_pfn_offset)  (dma_get_device_base)
    36: return phys_to_dma(dev, PFN_PHYS(mem->pfn_base));  (dma_get_device_base)
    37: return mem->device_base;  (dma_get_device_base)
    42: struct dma_coherent_mem **mem)  (dma_init_coherent_memory, argument)
    77: *mem = dma_mem;  (dma_init_coherent_memory)
    87: static void _dma_release_coherent_memory(struct dma_coherent_mem *mem)  (_dma_release_coherent_memory, argument)
    89: if (!mem)  (_dma_release_coherent_memory)
    92: memunmap(mem->virt_base);  (_dma_release_coherent_memory)
    93: kfree(mem…  (_dma_release_coherent_memory)
    Further references: mem is an argument of dma_get_device_base (32), dma_init_coherent_memory (40), dma_assign_coherent_memory (97), __dma_alloc_from_coherent (151), __dma_release_from_coherent (219), __dma_mmap_from_coherent (263); a local in dma_declare_coherent_memory (130), dma_alloc_from_dev_coherent (200), dma_release_from_dev_coherent (249), dma_mmap_from_dev_coherent (303), rmem_dma_device_init (330). (listing truncated)
/kernel/linux/linux-6.6/drivers/base/ |
memory.c
    107: struct memory_block *mem = to_memory_block(dev);  (memory_block_release, local)
    109: WARN_ON(mem->altmap);  (memory_block_release)
    110: kfree(mem);  (memory_block_release)
    123: struct memory_block *mem = to_memory_block(dev);  (phys_index_show, local)
    125: return sysfs_emit(buf, "%08lx\n", memory_block_id(mem->start_section_nr));  (phys_index_show)
    144: struct memory_block *mem = to_memory_block(dev);  (state_show, local)
    151: switch (mem->state) {  (state_show)
    163: return sysfs_emit(buf, "ERROR-UNKNOWN-%ld\n", mem->state);  (state_show)
    175: static unsigned long memblk_nr_poison(struct memory_block *mem);
    177: static inline unsigned long memblk_nr_poison(struct memory_block *mem)  (memblk_nr_poison, argument)
    Further references: mem is an argument of memory_block_online (186), memory_block_offline (242), memory_block_action (288), memory_block_change_state (308), early_node_zone_for_memory_block (681), memory_block_add_nid (728), memblk_nr_poison (1227); a local in memory_subsys_online (328), memory_subsys_offline (349), state_store (361), phys_device_show (406), valid_zones_show (431), find_memory_block_by_id (620), add_memory_block (761), create_memory_block_devices (855), remove_memory_block_devices (892), walk_memory_blocks (991), for_each_memory_block_cb (1018), memblk_nr_poison_inc (1212), memblk_nr_poison_sub (1221). (listing truncated)
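to_memory_block() is the usual container_of() pattern: the sysfs callbacks receive the embedded struct device and recover the enclosing memory_block from it. A self-contained userspace sketch of the same pattern, with invented struct names and a simplified container_of macro:

	#include <stddef.h>
	#include <stdio.h>

	/* Minimal container_of(), in the spirit of the kernel macro. */
	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct device { const char *name; };

	struct memory_block_like {
		unsigned long start_section_nr;
		struct device dev;	/* embedded device handed to callbacks */
	};

	static struct memory_block_like *to_block(struct device *dev)
	{
		return container_of(dev, struct memory_block_like, dev);
	}

	int main(void)
	{
		struct memory_block_like blk = { .start_section_nr = 42, .dev = { "memory42" } };
		struct device *dev = &blk.dev;	/* what a sysfs show() callback would receive */

		printf("%s -> section %lu\n", dev->name, to_block(dev)->start_section_nr);
		return 0;
	}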
/kernel/linux/linux-6.6/tools/testing/selftests/mm/ |
cow.c
    100: static int child_memcmp_fn(char *mem, size_t size,  (child_memcmp_fn, argument)
    107: memcpy(old, mem, size);  (child_memcmp_fn)
    115: return memcmp(old, mem, size);  (child_memcmp_fn)
    118: static int child_vmsplice_memcmp_fn(char *mem, size_t size,  (child_vmsplice_memcmp_fn, argument)
    122: .iov_base = mem,  (child_vmsplice_memcmp_fn)
    134: memcpy(old, mem, size);  (child_vmsplice_memcmp_fn)
    147: if (munmap(mem, size) < 0)  (child_vmsplice_memcmp_fn)
    165: typedef int (*child_fn)(char *mem, size_t size, struct comm_pipes *comm_pipes);
    167: static void do_test_cow_in_parent(char *mem, size_t size, bool do_mprotect,  (do_test_cow_in_parent, argument)
    185: exit(fn(mem, siz…  (do_test_cow_in_parent)
    Further references: mem is an argument of test_cow_in_parent (221), test_cow_in_parent_mprotect (226), test_vmsplice_in_child (231), test_vmsplice_in_child_mprotect (236), do_test_vmsplice_in_parent (241), test_vmsplice_before_fork (336), test_vmsplice_after_fork (341), do_test_iouring (347), test_iouring_ro (498), test_iouring_fork (503), do_test_ro_pin (517), test_ro_pin_on_shared (647), test_ro_fast_pin_on_shared (652), test_ro_pin_on_ro_previously_shared (657), test_ro_fast_pin_on_ro_previously_shared (662), test_ro_pin_on_ro_exclusive (667), test_ro_fast_pin_on_ro_exclusive (672), do_test_anon_thp_collapse (1135), test_anon_thp_collapse_unshared (1268), test_anon_thp_collapse_fully_shared (1273), test_anon_thp_collapse_lower_shared (1278), test_anon_thp_collapse_upper_shared (1283), test_cow (1352), test_ro_pin (1368), test_ro_fast_pin (1373); a local in do_run_with_base_page (681), do_run_with_thp (739), run_with_hugetlb (929), run_with_zeropage (1380), run_with_huge_zeropage (1410), run_with_memfd (1469), run_with_tmpfile (1513), run_with_memfd_hugetlb (1566). (listing truncated)
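do_test_cow_in_parent() follows a simple protocol: fork, have the child snapshot the buffer and later memcmp() it while the parent writes to it, and report whether the child ever observed the parent's writes (which would mean copy-on-write is broken). A stripped-down, hedged userspace sketch of that flow, without the real test's pipe handshake or mprotect variants:

	#include <stdio.h>
	#include <stdlib.h>
	#include <string.h>
	#include <sys/mman.h>
	#include <sys/wait.h>
	#include <unistd.h>

	int main(void)
	{
		size_t size = 4096;
		char *mem = mmap(NULL, size, PROT_READ | PROT_WRITE,
				 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
		if (mem == MAP_FAILED)
			return 1;
		memset(mem, 0xaa, size);

		pid_t pid = fork();
		if (pid < 0)
			return 1;
		if (pid == 0) {
			/* Child: snapshot the buffer, wait briefly, then compare. */
			char *old = malloc(size);
			memcpy(old, mem, size);
			usleep(100 * 1000);	/* crude stand-in for the pipe-based handshake */
			exit(memcmp(old, mem, size) ? 1 : 0);
		}

		/* Parent: write to the page; the child must keep seeing its snapshot. */
		memset(mem, 0x55, size);

		int status;
		waitpid(pid, &status, 0);
		printf("COW %s\n", WEXITSTATUS(status) == 0 ? "looks OK" : "BROKEN");
		return WEXITSTATUS(status);
	}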
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdgpu/ |
amdgpu_amdkfd_gpuvm.c
    77: struct kgd_mem *mem)  (kfd_mem_is_attached, argument)
    81: list_for_each_entry(entry, &mem->attachments, list)  (kfd_mem_is_attached)
    114: uint64_t mem;  (amdgpu_amdkfd_gpuvm_init_mem_limits, local)
    120: mem = si.freeram - si.freehigh;  (amdgpu_amdkfd_gpuvm_init_mem_limits)
    121: mem *= si.mem_unit;  (amdgpu_amdkfd_gpuvm_init_mem_limits)
    124: kfd_mem_limit.max_system_mem_limit = mem - (mem >> 4);  (amdgpu_amdkfd_gpuvm_init_mem_limits)
    250: "adev reference can't be null when alloc mem flags vram is set");  (amdgpu_amdkfd_unreserve_mem_limit)
    301: * @mem: BO of peer device that is being DMA mapped. Provides parameters  (comment)
    307: struct kgd_mem *mem, struc…  (create_dmamap_sg_bo)
    Further references: mem is an argument of kfd_mem_is_attached (76), create_dmamap_sg_bo (306), get_pte_flags (464), kfd_mem_dmamap_userptr (510), kfd_mem_dmamap_sg_bo (603), kfd_mem_dmamap_attachment (658), kfd_mem_dmaunmap_userptr (677), kfd_mem_dmaunmap_sg_bo (726), kfd_mem_dmaunmap_attachment (754), kfd_mem_export_dmabuf (774), kfd_mem_attach_dmabuf (790), kfd_mem_attach (823), add_kgd_mem_to_kfd_bo_list (964), remove_kgd_mem_from_kfd_bo_list (977), init_user_pages (997), reserve_bo_and_vm (1088), reserve_bo_and_cond_vms (1129), unmap_bo_from_gpuvm (1191), update_gpuvm_pte (1208), map_bo_to_gpuvm (1230), amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu (1631), amdgpu_amdkfd_gpuvm_free_memory_of_gpu (1801), amdgpu_amdkfd_gpuvm_map_memory_to_gpu (1915), amdgpu_amdkfd_gpuvm_unmap_memory_from_gpu (2038), amdgpu_amdkfd_gpuvm_sync_memory (2099), amdgpu_amdkfd_gpuvm_map_gtt_bo_to_kernel (2177), amdgpu_amdkfd_gpuvm_unmap_gtt_bo_from_kernel (2237), amdgpu_amdkfd_gpuvm_get_vm_fault_info (2247), amdgpu_amdkfd_gpuvm_import_dmabuf (2258), amdgpu_amdkfd_gpuvm_export_dmabuf (2328), amdgpu_amdkfd_evict_userptr (2355), amdgpu_amdkfd_add_gws_to_process (2886), amdgpu_amdkfd_remove_gws_from_process (2950), amdgpu_amdkfd_bo_mapped_to_dev (2998); a local in update_invalid_user_pages (2394), validate_invalid_user_pages (2497), confirm_valid_user_pages_locked (2590), amdgpu_amdkfd_gpuvm_restore_process_bos (2731). (listing truncated)