
Search results for refs:pgmap (1 - 25 of 70), sorted by relevance

/kernel/linux/linux-5.10/mm/
memremap.c
44 static void devmap_managed_enable_put(struct dev_pagemap *pgmap) in devmap_managed_enable_put() argument
46 if (pgmap->type == MEMORY_DEVICE_PRIVATE || in devmap_managed_enable_put()
47 pgmap->type == MEMORY_DEVICE_FS_DAX) in devmap_managed_enable_put()
51 static void devmap_managed_enable_get(struct dev_pagemap *pgmap) in devmap_managed_enable_get() argument
53 if (pgmap->type == MEMORY_DEVICE_PRIVATE || in devmap_managed_enable_get()
54 pgmap->type == MEMORY_DEVICE_FS_DAX) in devmap_managed_enable_get()
58 static void devmap_managed_enable_get(struct dev_pagemap *pgmap) in devmap_managed_enable_get() argument
61 static void devmap_managed_enable_put(struct dev_pagemap *pgmap) in devmap_managed_enable_put() argument
73 static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id) in pfn_first() argument
75 struct range *range = &pgmap in pfn_first()
83 pgmap_pfn_valid(struct dev_pagemap *pgmap, unsigned long pfn) pgmap_pfn_valid() argument
98 pfn_end(struct dev_pagemap *pgmap, int range_id) pfn_end() argument
115 dev_pagemap_kill(struct dev_pagemap *pgmap) dev_pagemap_kill() argument
123 dev_pagemap_cleanup(struct dev_pagemap *pgmap) dev_pagemap_cleanup() argument
139 pageunmap_range(struct dev_pagemap *pgmap, int range_id) pageunmap_range() argument
168 memunmap_pages(struct dev_pagemap *pgmap) memunmap_pages() argument
194 struct dev_pagemap *pgmap = dev_pagemap_percpu_release() local
200 pagemap_range(struct dev_pagemap *pgmap, struct mhp_params *params, int range_id, int nid) pagemap_range() argument
314 memremap_pages(struct dev_pagemap *pgmap, int nid) memremap_pages() argument
426 devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memremap_pages() argument
443 devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memunmap_pages() argument
470 get_dev_pagemap(unsigned long pfn, struct dev_pagemap *pgmap) get_dev_pagemap() argument
[all...]
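
The block above is the provider side of this API; the pattern a driver follows to hand memory to it shows up in the driver entries further down this page (the Xen unpopulated-alloc and p2pdma matches). A minimal sketch, assuming a hypothetical "foo" driver with an already-claimed struct resource; only the dev_pagemap fields and devm_memremap_pages() come from the listing, the rest is illustrative:

    #include <linux/device.h>
    #include <linux/err.h>
    #include <linux/ioport.h>
    #include <linux/memremap.h>

    /* Map a device-owned physical range so it gets ZONE_DEVICE struct pages. */
    static void *foo_map_device_pages(struct device *dev, struct resource *res)
    {
            struct dev_pagemap *pgmap;

            pgmap = devm_kzalloc(dev, sizeof(*pgmap), GFP_KERNEL);
            if (!pgmap)
                    return ERR_PTR(-ENOMEM);

            pgmap->type = MEMORY_DEVICE_GENERIC;    /* same type the Xen driver below uses */
            pgmap->range = (struct range) {
                    .start = res->start,
                    .end = res->end,
            };
            pgmap->nr_range = 1;
            pgmap->owner = res;

            /* Device-managed: unwound via memunmap_pages() when the driver detaches. */
            return devm_memremap_pages(dev, pgmap);
    }
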
gup.c
27 struct dev_pagemap *pgmap; member
396 struct dev_pagemap **pgmap) in follow_page_pte()
455 * case since they are only valid while holding the pgmap in follow_page_pte()
458 *pgmap = get_dev_pagemap(pte_pfn(pte), *pgmap); in follow_page_pte()
459 if (*pgmap) in follow_page_pte()
607 page = follow_devmap_pmd(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
613 return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
633 return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
663 follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
394 follow_page_pte(struct vm_area_struct *vma, unsigned long address, pmd_t *pmd, unsigned int flags, struct dev_pagemap **pgmap) follow_page_pte() argument
2166 struct dev_pagemap *pgmap = NULL; gup_pte_range() local
2264 struct dev_pagemap *pgmap = NULL; __gup_device_huge() local
[all...]
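
The gup.c matches above use get_dev_pagemap()/put_dev_pagemap() as a reference-counted lookup: the pages are only valid while the pgmap reference is held. A minimal sketch of that pattern, assuming a caller that only wants to classify a PFN (the helper name is hypothetical):

    #include <linux/memremap.h>

    /* Returns true if @pfn belongs to a fsdax (MEMORY_DEVICE_FS_DAX) pagemap. */
    static bool foo_pfn_is_fsdax(unsigned long pfn)
    {
            struct dev_pagemap *pgmap;
            bool ret;

            pgmap = get_dev_pagemap(pfn, NULL);     /* takes a reference, NULL if not device memory */
            if (!pgmap)
                    return false;

            ret = pgmap->type == MEMORY_DEVICE_FS_DAX;
            put_dev_pagemap(pgmap);                 /* drop the reference taken above */
            return ret;
    }
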
/kernel/linux/linux-6.6/mm/
memremap.c
45 static void devmap_managed_enable_put(struct dev_pagemap *pgmap) in devmap_managed_enable_put() argument
47 if (pgmap->type == MEMORY_DEVICE_FS_DAX) in devmap_managed_enable_put()
51 static void devmap_managed_enable_get(struct dev_pagemap *pgmap) in devmap_managed_enable_get() argument
53 if (pgmap->type == MEMORY_DEVICE_FS_DAX) in devmap_managed_enable_get()
57 static void devmap_managed_enable_get(struct dev_pagemap *pgmap) in devmap_managed_enable_get() argument
60 static void devmap_managed_enable_put(struct dev_pagemap *pgmap) in devmap_managed_enable_put() argument
72 static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id) in pfn_first() argument
74 struct range *range = &pgmap->ranges[range_id]; in pfn_first()
79 return pfn + vmem_altmap_offset(pgmap_altmap(pgmap)); in pfn_first()
82 bool pgmap_pfn_valid(struct dev_pagemap *pgmap, unsigne argument
97 pfn_end(struct dev_pagemap *pgmap, int range_id) pfn_end() argument
104 pfn_len(struct dev_pagemap *pgmap, unsigned long range_id) pfn_len() argument
110 pageunmap_range(struct dev_pagemap *pgmap, int range_id) pageunmap_range() argument
136 memunmap_pages(struct dev_pagemap *pgmap) memunmap_pages() argument
164 struct dev_pagemap *pgmap = container_of(ref, struct dev_pagemap, ref); dev_pagemap_percpu_release() local
169 pagemap_range(struct dev_pagemap *pgmap, struct mhp_params *params, int range_id, int nid) pagemap_range() argument
291 memremap_pages(struct dev_pagemap *pgmap, int nid) memremap_pages() argument
402 devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memremap_pages() argument
419 devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memunmap_pages() argument
446 get_dev_pagemap(unsigned long pfn, struct dev_pagemap *pgmap) get_dev_pagemap() argument
[all...]
sparse-vmemmap.c
373 struct dev_pagemap *pgmap) in reuse_compound_section()
375 unsigned long nr_pages = pgmap_vmemmap_nr(pgmap); in reuse_compound_section()
377 PHYS_PFN(pgmap->ranges[pgmap->nr_range].start); in reuse_compound_section()
402 struct dev_pagemap *pgmap) in vmemmap_populate_compound_pages()
408 if (reuse_compound_section(start_pfn, pgmap)) { in vmemmap_populate_compound_pages()
421 size = min(end - start, pgmap_vmemmap_nr(pgmap) * sizeof(struct page)); in vmemmap_populate_compound_pages()
454 struct dev_pagemap *pgmap) in __populate_section_memmap()
464 if (vmemmap_can_optimize(altmap, pgmap)) in __populate_section_memmap()
465 r = vmemmap_populate_compound_pages(pfn, start, end, nid, pgmap); in __populate_section_memmap()
372 reuse_compound_section(unsigned long start_pfn, struct dev_pagemap *pgmap) reuse_compound_section() argument
399 vmemmap_populate_compound_pages(unsigned long start_pfn, unsigned long start, unsigned long end, int node, struct dev_pagemap *pgmap) vmemmap_populate_compound_pages() argument
452 __populate_section_memmap(unsigned long pfn, unsigned long nr_pages, int nid, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) __populate_section_memmap() argument
[all...]
gup.c
29 struct dev_pagemap *pgmap; member
581 struct dev_pagemap **pgmap) in follow_page_pte()
618 * case since they are only valid while holding the pgmap in follow_page_pte()
621 *pgmap = get_dev_pagemap(pte_pfn(pte), *pgmap); in follow_page_pte()
622 if (*pgmap) in follow_page_pte()
709 page = follow_devmap_pmd(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
715 return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
727 return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
734 follow_page_pte(vma, address, pmd, flags, &ctx->pgmap); in follow_pmd_mask()
579 follow_page_pte(struct vm_area_struct *vma, unsigned long address, pmd_t *pmd, unsigned int flags, struct dev_pagemap **pgmap) follow_page_pte() argument
2559 struct dev_pagemap *pgmap = NULL; gup_pte_range() local
2675 struct dev_pagemap *pgmap = NULL; __gup_device_huge() local
[all...]
sparse.c
430 struct dev_pagemap *pgmap) in __populate_section_memmap()
633 struct dev_pagemap *pgmap) in populate_section_memmap()
635 return __populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap); in populate_section_memmap()
705 struct dev_pagemap *pgmap) in populate_section_memmap()
830 struct dev_pagemap *pgmap) in section_activate()
862 memmap = populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap); in section_activate()
877 * @pgmap: alternate compound page geometry for devmap mappings
892 struct dev_pagemap *pgmap) in sparse_add_section()
903 memmap = section_activate(nid, start_pfn, nr_pages, altmap, pgmap); in sparse_add_section()
428 __populate_section_memmap(unsigned long pfn, unsigned long nr_pages, int nid, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) __populate_section_memmap() argument
631 populate_section_memmap(unsigned long pfn, unsigned long nr_pages, int nid, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) populate_section_memmap() argument
703 populate_section_memmap(unsigned long pfn, unsigned long nr_pages, int nid, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) populate_section_memmap() argument
828 section_activate(int nid, unsigned long pfn, unsigned long nr_pages, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) section_activate() argument
890 sparse_add_section(int nid, unsigned long start_pfn, unsigned long nr_pages, struct vmem_altmap *altmap, struct dev_pagemap *pgmap) sparse_add_section() argument
mm_init.c
973 struct dev_pagemap *pgmap) in __init_zone_device_page()
988 * ZONE_DEVICE pages union ->lru with a ->pgmap back pointer in __init_zone_device_page()
992 page->pgmap = pgmap; in __init_zone_device_page()
1014 if (pgmap->type == MEMORY_DEVICE_PRIVATE || in __init_zone_device_page()
1015 pgmap->type == MEMORY_DEVICE_COHERENT) in __init_zone_device_page()
1028 struct dev_pagemap *pgmap) in compound_nr_pages()
1030 if (!vmemmap_can_optimize(altmap, pgmap)) in compound_nr_pages()
1031 return pgmap_vmemmap_nr(pgmap); in compound_nr_pages()
1039 struct dev_pagemap *pgmap, in memmap_init_compound()
971 __init_zone_device_page(struct page *page, unsigned long pfn, unsigned long zone_idx, int nid, struct dev_pagemap *pgmap) __init_zone_device_page() argument
1027 compound_nr_pages(struct vmem_altmap *altmap, struct dev_pagemap *pgmap) compound_nr_pages() argument
1036 memmap_init_compound(struct page *head, unsigned long head_pfn, unsigned long zone_idx, int nid, struct dev_pagemap *pgmap, unsigned long nr_pages) memmap_init_compound() argument
1063 memmap_init_zone_device(struct zone *zone, unsigned long start_pfn, unsigned long nr_pages, struct dev_pagemap *pgmap) memmap_init_zone_device() argument
[all...]
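
__init_zone_device_page() above stores the owning pgmap in page->pgmap, so device memory can later be classified straight from the page. A minimal sketch of a consumer-side check, assuming a page already obtained elsewhere (the helper name is hypothetical; the real header provides similar is_device_*_page() helpers):

    #include <linux/mm.h>
    #include <linux/memremap.h>

    /* True for ZONE_DEVICE pages whose pagemap was registered as device-coherent. */
    static bool foo_page_is_device_coherent(const struct page *page)
    {
            return is_zone_device_page(page) &&
                   page->pgmap->type == MEMORY_DEVICE_COHERENT;
    }
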
/kernel/linux/linux-6.6/include/linux/
memremap.h
100 int (*memory_failure)(struct dev_pagemap *pgmap, unsigned long pfn,
142 static inline bool pgmap_has_memory_failure(struct dev_pagemap *pgmap) in pgmap_has_memory_failure() argument
144 return pgmap->ops && pgmap->ops->memory_failure; in pgmap_has_memory_failure()
147 static inline struct vmem_altmap *pgmap_altmap(struct dev_pagemap *pgmap) in pgmap_altmap() argument
149 if (pgmap->flags & PGMAP_ALTMAP_VALID) in pgmap_altmap()
150 return &pgmap->altmap; in pgmap_altmap()
154 static inline unsigned long pgmap_vmemmap_nr(struct dev_pagemap *pgmap) in pgmap_vmemmap_nr() argument
156 return 1 << pgmap->vmemmap_shift; in pgmap_vmemmap_nr()
163 page->pgmap in is_device_private_page()
203 devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memremap_pages() argument
215 devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memunmap_pages() argument
220 get_dev_pagemap(unsigned long pfn, struct dev_pagemap *pgmap) get_dev_pagemap() argument
226 pgmap_pfn_valid(struct dev_pagemap *pgmap, unsigned long pfn) pgmap_pfn_valid() argument
248 put_dev_pagemap(struct dev_pagemap *pgmap) put_dev_pagemap() argument
[all...]
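
pgmap_has_memory_failure() above is the guard for the optional ->memory_failure() hook declared at line 100. A minimal sketch of how a caller would use it; the trailing nr_pages/mf_flags parameters are cut off in this listing, so treat the exact callback signature here as an assumption:

    #include <linux/errno.h>
    #include <linux/memremap.h>

    /* Forward a poisoned-pfn notification to the pagemap owner, if it cares. */
    static int foo_notify_pgmap_failure(struct dev_pagemap *pgmap, unsigned long pfn,
                                        unsigned long nr_pages, int mf_flags)
    {
            if (!pgmap_has_memory_failure(pgmap))
                    return -EOPNOTSUPP;

            /* Assumed 6.6 signature: (pgmap, pfn, nr_pages, mf_flags). */
            return pgmap->ops->memory_failure(pgmap, pfn, nr_pages, mf_flags);
    }
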
huge_mm.h
222 pmd_t *pmd, int flags, struct dev_pagemap **pgmap);
224 pud_t *pud, int flags, struct dev_pagemap **pgmap);
375 unsigned long addr, pmd_t *pmd, int flags, struct dev_pagemap **pgmap) in follow_devmap_pmd()
381 unsigned long addr, pud_t *pud, int flags, struct dev_pagemap **pgmap) in follow_devmap_pud()
374 follow_devmap_pmd(struct vm_area_struct *vma, unsigned long addr, pmd_t *pmd, int flags, struct dev_pagemap **pgmap) follow_devmap_pmd() argument
380 follow_devmap_pud(struct vm_area_struct *vma, unsigned long addr, pud_t *pud, int flags, struct dev_pagemap **pgmap) follow_devmap_pud() argument
/kernel/linux/linux-5.10/include/linux/
memremap.h
78 void (*kill)(struct dev_pagemap *pgmap);
83 void (*cleanup)(struct dev_pagemap *pgmap);
126 static inline struct vmem_altmap *pgmap_altmap(struct dev_pagemap *pgmap) in pgmap_altmap() argument
128 if (pgmap->flags & PGMAP_ALTMAP_VALID) in pgmap_altmap()
129 return &pgmap->altmap; in pgmap_altmap()
134 void *memremap_pages(struct dev_pagemap *pgmap, int nid);
135 void memunmap_pages(struct dev_pagemap *pgmap);
136 void *devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap);
137 void devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap);
139 struct dev_pagemap *pgmap);
146 devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memremap_pages() argument
158 devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap) devm_memunmap_pages() argument
163 get_dev_pagemap(unsigned long pfn, struct dev_pagemap *pgmap) get_dev_pagemap() argument
169 pgmap_pfn_valid(struct dev_pagemap *pgmap, unsigned long pfn) pgmap_pfn_valid() argument
191 put_dev_pagemap(struct dev_pagemap *pgmap) put_dev_pagemap() argument
[all...]
/kernel/linux/linux-5.10/tools/testing/nvdimm/test/
iomap.c
101 struct dev_pagemap *pgmap = _pgmap; in nfit_test_kill() local
103 WARN_ON(!pgmap || !pgmap->ref); in nfit_test_kill()
105 if (pgmap->ops && pgmap->ops->kill) in nfit_test_kill()
106 pgmap->ops->kill(pgmap); in nfit_test_kill()
108 percpu_ref_kill(pgmap->ref); in nfit_test_kill()
110 if (pgmap->ops && pgmap in nfit_test_kill()
120 struct dev_pagemap *pgmap = dev_pagemap_percpu_release() local
126 __wrap_devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) __wrap_devm_memremap_pages() argument
[all...]
/kernel/linux/linux-5.10/drivers/xen/
unpopulated-alloc.c
20 struct dev_pagemap *pgmap; in fill_list() local
41 pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL); in fill_list()
42 if (!pgmap) { in fill_list()
47 pgmap->type = MEMORY_DEVICE_GENERIC; in fill_list()
48 pgmap->range = (struct range) { in fill_list()
52 pgmap->nr_range = 1; in fill_list()
53 pgmap->owner = res; in fill_list()
78 vaddr = memremap_pages(pgmap, NUMA_NO_NODE); in fill_list()
97 kfree(pgmap); in fill_list()
[all...]
/kernel/linux/linux-6.6/drivers/xen/
unpopulated-alloc.c
36 struct dev_pagemap *pgmap; in fill_list() local
84 pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL); in fill_list()
85 if (!pgmap) { in fill_list()
90 pgmap->type = MEMORY_DEVICE_GENERIC; in fill_list()
91 pgmap->range = (struct range) { in fill_list()
95 pgmap->nr_range = 1; in fill_list()
96 pgmap->owner = res; in fill_list()
121 vaddr = memremap_pages(pgmap, NUMA_NO_NODE); in fill_list()
139 kfree(pgmap); in fill_list()
[all...]
/kernel/linux/linux-6.6/drivers/pci/
p2pdma.c
31 struct dev_pagemap pgmap; member
36 static struct pci_p2pdma_pagemap *to_p2p_pgmap(struct dev_pagemap *pgmap) in to_p2p_pgmap() argument
38 return container_of(pgmap, struct pci_p2pdma_pagemap, pgmap); in to_p2p_pgmap()
196 struct pci_p2pdma_pagemap *pgmap = to_p2p_pgmap(page->pgmap); in p2pdma_page_free() local
199 rcu_dereference_protected(pgmap->provider->p2pdma, 1); in p2pdma_page_free()
289 struct dev_pagemap *pgmap; in pci_p2pdma_add_resource() local
316 pgmap = &p2p_pgmap->pgmap; in pci_p2pdma_add_resource()
968 pci_p2pdma_map_type(struct dev_pagemap *pgmap, struct device *dev) pci_p2pdma_map_type() argument
[all...]
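
p2pdma.c embeds struct dev_pagemap inside its own pci_p2pdma_pagemap and recovers the wrapper with container_of(); that embedding is the usual way for a driver to attach private state to a pagemap. A minimal sketch with a hypothetical wrapper:

    #include <linux/container_of.h>
    #include <linux/memremap.h>

    struct foo_pagemap {
            struct dev_pagemap pgmap;       /* must be embedded, not a pointer */
            void *private;                  /* per-mapping driver state */
    };

    static struct foo_pagemap *to_foo_pagemap(struct dev_pagemap *pgmap)
    {
            return container_of(pgmap, struct foo_pagemap, pgmap);
    }
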
/kernel/linux/linux-5.10/drivers/nvdimm/
pmem.c
335 static void pmem_pagemap_cleanup(struct dev_pagemap *pgmap) in pmem_pagemap_cleanup() argument
338 container_of(pgmap->ref, struct request_queue, q_usage_counter); in pmem_pagemap_cleanup()
343 static void pmem_release_queue(void *pgmap) in pmem_release_queue() argument
345 pmem_pagemap_cleanup(pgmap); in pmem_release_queue()
348 static void pmem_pagemap_kill(struct dev_pagemap *pgmap) in pmem_pagemap_kill() argument
351 container_of(pgmap->ref, struct request_queue, q_usage_counter); in pmem_pagemap_kill()
401 rc = nvdimm_setup_pfn(nd_pfn, &pmem->pgmap); in pmem_attach_disk()
429 pmem->pgmap.ref = &q->q_usage_counter; in pmem_attach_disk()
431 pmem->pgmap.type = MEMORY_DEVICE_FS_DAX; in pmem_attach_disk()
432 pmem->pgmap in pmem_attach_disk()
[all...]
pfn_devs.c
673 static int __nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap) in __nvdimm_setup_pfn() argument
675 struct range *range = &pgmap->range; in __nvdimm_setup_pfn()
676 struct vmem_altmap *altmap = &pgmap->altmap; in __nvdimm_setup_pfn()
696 pgmap->nr_range = 1; in __nvdimm_setup_pfn()
711 pgmap->flags |= PGMAP_ALTMAP_VALID; in __nvdimm_setup_pfn()
835 int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap) in nvdimm_setup_pfn() argument
847 return __nvdimm_setup_pfn(nd_pfn, pgmap); in nvdimm_setup_pfn()
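
__nvdimm_setup_pfn() above carves the struct page array out of the device memory itself by filling pgmap->altmap and setting PGMAP_ALTMAP_VALID, which is what makes pgmap_altmap() hand the altmap back to the core. A minimal sketch of that pattern; the function name and PFN counts are illustrative, only the fields and the flag come from the listing:

    #include <linux/memremap.h>

    /* Reserve the leading part of the range to hold its own memmap. */
    static void foo_reserve_memmap_space(struct dev_pagemap *pgmap,
                                         unsigned long base_pfn,
                                         unsigned long memmap_pfns)
    {
            pgmap->altmap.base_pfn = base_pfn;      /* first PFN of pgmap->range */
            pgmap->altmap.free = memmap_pfns;       /* PFNs the core may use for struct pages */
            pgmap->flags |= PGMAP_ALTMAP_VALID;     /* pgmap_altmap() now returns &pgmap->altmap */
    }
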
/kernel/linux/linux-5.10/drivers/pci/
p2pdma.c
37 struct dev_pagemap pgmap; member
42 static struct pci_p2pdma_pagemap *to_p2p_pgmap(struct dev_pagemap *pgmap) in to_p2p_pgmap() argument
44 return container_of(pgmap, struct pci_p2pdma_pagemap, pgmap); in to_p2p_pgmap()
161 struct dev_pagemap *pgmap; in pci_p2pdma_add_resource() local
187 pgmap = &p2p_pgmap->pgmap; in pci_p2pdma_add_resource()
188 pgmap->range.start = pci_resource_start(pdev, bar) + offset; in pci_p2pdma_add_resource()
189 pgmap->range.end = pgmap in pci_p2pdma_add_resource()
[all...]
/kernel/linux/linux-5.10/drivers/dax/pmem/
core.c
20 struct dev_pagemap pgmap = { }; in __dax_pmem_probe() local
34 rc = nvdimm_setup_pfn(nd_pfn, &pgmap); in __dax_pmem_probe()
54 range = pgmap.range; in __dax_pmem_probe()
65 .pgmap = &pgmap, in __dax_pmem_probe()
/kernel/linux/linux-6.6/drivers/dax/
device.c
85 if (dev_dax->pgmap->vmemmap_shift) in dax_set_mapping()
397 struct dev_pagemap *pgmap; in dev_dax_probe() local
406 "static pgmap / multi-range device conflict\n"); in dev_dax_probe()
410 pgmap = dev_dax->pgmap; in dev_dax_probe()
412 if (dev_dax->pgmap) { in dev_dax_probe()
418 pgmap = devm_kzalloc(dev, in dev_dax_probe()
419 struct_size(pgmap, ranges, dev_dax->nr_range - 1), in dev_dax_probe()
421 if (!pgmap) in dev_dax_probe()
424 pgmap in dev_dax_probe()
[all...]
pmem.c
19 struct dev_pagemap pgmap = { }; in __dax_pmem_probe() local
33 rc = nvdimm_setup_pfn(nd_pfn, &pgmap); in __dax_pmem_probe()
53 range = pgmap.range; in __dax_pmem_probe()
64 .pgmap = &pgmap, in __dax_pmem_probe()
/kernel/linux/linux-5.10/drivers/dax/
device.c
395 struct dev_pagemap *pgmap; in dev_dax_probe() local
401 pgmap = dev_dax->pgmap; in dev_dax_probe()
402 if (dev_WARN_ONCE(dev, pgmap && dev_dax->nr_range > 1, in dev_dax_probe()
403 "static pgmap / multi-range device conflict\n")) in dev_dax_probe()
406 if (!pgmap) { in dev_dax_probe()
407 pgmap = devm_kzalloc(dev, sizeof(*pgmap) + sizeof(struct range) in dev_dax_probe()
409 if (!pgmap) in dev_dax_probe()
411 pgmap in dev_dax_probe()
[all...]
super.c
133 struct dev_pagemap *pgmap, *end_pgmap; in __generic_fsdax_supported() local
135 pgmap = get_dev_pagemap(pfn_t_to_pfn(pfn), NULL); in __generic_fsdax_supported()
137 if (pgmap && pgmap == end_pgmap && pgmap->type == MEMORY_DEVICE_FS_DAX in __generic_fsdax_supported()
138 && pfn_t_to_page(pfn)->pgmap == pgmap in __generic_fsdax_supported()
139 && pfn_t_to_page(end_pfn)->pgmap == pgmap in __generic_fsdax_supported()
143 put_dev_pagemap(pgmap); in __generic_fsdax_supported()
[all...]
/kernel/linux/linux-6.6/tools/testing/nvdimm/test/
iomap.c
99 struct dev_pagemap *pgmap = _pgmap; in nfit_test_kill() local
101 WARN_ON(!pgmap); in nfit_test_kill()
103 percpu_ref_kill(&pgmap->ref); in nfit_test_kill()
105 wait_for_completion(&pgmap->done); in nfit_test_kill()
106 percpu_ref_exit(&pgmap->ref); in nfit_test_kill()
111 struct dev_pagemap *pgmap = container_of(ref, struct dev_pagemap, ref); in dev_pagemap_percpu_release() local
113 complete(&pgmap->done); in dev_pagemap_percpu_release()
116 void *__wrap_devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap) in __wrap_devm_memremap_pages() argument
119 resource_size_t offset = pgmap->range.start; in __wrap_devm_memremap_pages()
123 return devm_memremap_pages(dev, pgmap); in __wrap_devm_memremap_pages()
[all...]
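
The test shim above drives the reference lifecycle by hand and shows the whole contract in one place: the percpu_ref release handler completes pgmap->done, and teardown is kill, wait, exit. A minimal sketch of the same sequence with hypothetical names:

    #include <linux/completion.h>
    #include <linux/container_of.h>
    #include <linux/memremap.h>
    #include <linux/percpu-refcount.h>

    /* Called when the last reference to the pagemap is dropped. */
    static void foo_pagemap_release(struct percpu_ref *ref)
    {
            struct dev_pagemap *pgmap = container_of(ref, struct dev_pagemap, ref);

            complete(&pgmap->done);
    }

    /* Teardown: stop new references, wait out existing users, free the ref. */
    static void foo_pagemap_teardown(struct dev_pagemap *pgmap)
    {
            percpu_ref_kill(&pgmap->ref);
            wait_for_completion(&pgmap->done);
            percpu_ref_exit(&pgmap->ref);
    }
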
/kernel/linux/linux-6.6/drivers/gpu/drm/amd/amdkfd/
kfd_migrate.c
209 return (addr + adev->kfd.pgmap.range.start) >> PAGE_SHIFT; in svm_migrate_addr_to_pfn()
239 return (addr - adev->kfd.pgmap.range.start); in svm_migrate_addr()
998 struct dev_pagemap *pgmap; in kgd2kfd_init_zone_device() local
1010 pgmap = &kfddev->pgmap; in kgd2kfd_init_zone_device()
1011 memset(pgmap, 0, sizeof(*pgmap)); in kgd2kfd_init_zone_device()
1018 pgmap->range.start = adev->gmc.aper_base; in kgd2kfd_init_zone_device()
1019 pgmap->range.end = adev->gmc.aper_base + adev->gmc.aper_size - 1; in kgd2kfd_init_zone_device()
1020 pgmap in kgd2kfd_init_zone_device()
[all...]
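
kfd_migrate.c above translates between device-relative offsets and CPU PFNs purely by offsetting against pgmap.range.start. A minimal sketch of the pair of conversions, mirroring lines 209 and 239 (the helper names are hypothetical):

    #include <linux/memremap.h>
    #include <linux/mm.h>

    /* Device-relative offset -> CPU PFN of the corresponding ZONE_DEVICE page. */
    static unsigned long foo_offset_to_pfn(struct dev_pagemap *pgmap, u64 offset)
    {
            return (offset + pgmap->range.start) >> PAGE_SHIFT;
    }

    /* CPU physical address inside the range -> device-relative offset. */
    static u64 foo_addr_to_offset(struct dev_pagemap *pgmap, u64 addr)
    {
            return addr - pgmap->range.start;
    }
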
/kernel/linux/linux-6.6/drivers/nvdimm/
pmem.c
434 static int pmem_pagemap_memory_failure(struct dev_pagemap *pgmap, in pmem_pagemap_memory_failure() argument
438 container_of(pgmap, struct pmem_device, pgmap); in pmem_pagemap_memory_failure()
477 rc = nvdimm_setup_pfn(nd_pfn, &pmem->pgmap); in pmem_attach_disk()
506 pmem->pgmap.owner = pmem; in pmem_attach_disk()
509 pmem->pgmap.type = MEMORY_DEVICE_FS_DAX; in pmem_attach_disk()
510 pmem->pgmap.ops = &fsdax_pagemap_ops; in pmem_attach_disk()
511 addr = devm_memremap_pages(dev, &pmem->pgmap); in pmem_attach_disk()
515 range_len(&pmem->pgmap.range); in pmem_attach_disk()
517 bb_range = pmem->pgmap in pmem_attach_disk()
[all...]
