/kernel/linux/linux-6.6/mm/
cma.c
    45  return PFN_PHYS(cma->base_pfn); in cma_get_base()
    73  return (cma->base_pfn & ((1UL << align_order) - 1)) in cma_bitmap_aligned_offset()
    89  bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit; in cma_clear_bitmap()
    99  unsigned long base_pfn = cma->base_pfn, pfn; in cma_activate_area() local
    111  WARN_ON_ONCE(!pfn_valid(base_pfn)); in cma_activate_area()
    112  zone = page_zone(pfn_to_page(base_pfn)); in cma_activate_area()
    113  for (pfn = base_pfn + 1; pfn < base_pfn + cma->count; pfn++) { in cma_activate_area()
    119  for (pfn = base_pfn; pf in cma_activate_area()
    [all...]
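
The cma.c matches center on converting between page frame numbers and CMA bitmap slots. A minimal sketch of that arithmetic, using a simplified stand-in for the kernel's struct cma (only base_pfn, count and order_per_bit; cma_like, bitmap_no_for_pfn and bitmap_aligned_offset are illustrative names, not kernel symbols):

    #include <stdio.h>

    /* Simplified stand-in for the kernel's struct cma (illustration only). */
    struct cma_like {
        unsigned long base_pfn;      /* first PFN of the CMA area */
        unsigned long count;         /* number of pages in the area */
        unsigned int  order_per_bit; /* one bitmap bit covers 1 << order_per_bit pages */
    };

    /* Bitmap slot that covers a given pfn, as in cma_clear_bitmap(). */
    static unsigned long bitmap_no_for_pfn(const struct cma_like *cma, unsigned long pfn)
    {
        return (pfn - cma->base_pfn) >> cma->order_per_bit;
    }

    /* Offset of base_pfn within an alignment block, expressed in bitmap slots,
     * mirroring the cma_bitmap_aligned_offset() expression shown above. */
    static unsigned long bitmap_aligned_offset(const struct cma_like *cma, unsigned int align_order)
    {
        return (cma->base_pfn & ((1UL << align_order) - 1)) >> cma->order_per_bit;
    }

    int main(void)
    {
        struct cma_like cma = { .base_pfn = 0x80000, .count = 4096, .order_per_bit = 0 };

        printf("slot for pfn 0x80010: %lu\n", bitmap_no_for_pfn(&cma, 0x80010));
        printf("aligned offset (order 9): %lu\n", bitmap_aligned_offset(&cma, 9));
        return 0;
    }

With order_per_bit = 0 each bitmap bit tracks a single page; larger orders let one bit cover a power-of-two block of pages.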
cma.h
    14  unsigned long base_pfn; member

cma_debug.c
    171  debugfs_create_file("base_pfn", 0444, tmp, in cma_debugfs_add_one()
    172  &cma->base_pfn, &cma_debugfs_fops); in cma_debugfs_add_one()
/kernel/linux/linux-5.10/arch/powerpc/platforms/powernv/
memtrace.c
    116  u64 base_pfn; in memtrace_alloc_node() local
    130  for (base_pfn = end_pfn; base_pfn > start_pfn; base_pfn -= nr_pages) { in memtrace_alloc_node()
    131  if (memtrace_offline_pages(nid, base_pfn, nr_pages) == true) { in memtrace_alloc_node()
    136  memtrace_clear_range(base_pfn, nr_pages); in memtrace_alloc_node()
    143  end_pfn = base_pfn + nr_pages; in memtrace_alloc_node()
    144  for (pfn = base_pfn; pfn < end_pfn; pfn += bytes >> PAGE_SHIFT) { in memtrace_alloc_node()
    148  return base_pfn << PAGE_SHIFT; in memtrace_alloc_node()
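
memtrace_alloc_node() scans a node's PFN range from the top down in fixed-size chunks, tries to offline each chunk, and returns the winner as a physical address (base_pfn << PAGE_SHIFT). A rough sketch of that scan, assuming 4 KiB pages; try_offline_chunk and scan_for_chunk are hypothetical names standing in for memtrace_offline_pages() and the surrounding loop:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12  /* assumption: 4 KiB pages, just for the sketch */

    /* Placeholder for memtrace_offline_pages(): pretend only naturally
     * aligned chunks can be offlined, so the scan has something to test. */
    static bool try_offline_chunk(int nid, uint64_t base_pfn, uint64_t nr_pages)
    {
        (void)nid;
        return (base_pfn % nr_pages) == 0;
    }

    /* Walk a node's PFN range from the top down in nr_pages-sized chunks and
     * return the physical address of the first chunk that could be offlined,
     * or 0 on failure (the shape of the loop in memtrace_alloc_node()). */
    static uint64_t scan_for_chunk(int nid, uint64_t start_pfn, uint64_t end_pfn,
                                   uint64_t nr_pages)
    {
        for (uint64_t base_pfn = end_pfn; base_pfn > start_pfn; base_pfn -= nr_pages)
            if (try_offline_chunk(nid, base_pfn, nr_pages))
                return base_pfn << PAGE_SHIFT; /* PFN to physical address */

        return 0;
    }

    int main(void)
    {
        /* Node 0 spans PFNs [0x1000, 0x8800); look for a free 0x800-page chunk. */
        printf("chunk at phys 0x%llx\n",
               (unsigned long long)scan_for_chunk(0, 0x1000, 0x8800, 0x800));
        return 0;
    }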
/kernel/linux/linux-5.10/mm/
cma.c
    45  return PFN_PHYS(cma->base_pfn); in cma_get_base()
    73  return (cma->base_pfn & ((1UL << align_order) - 1)) in cma_bitmap_aligned_offset()
    88  bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit; in cma_clear_bitmap()
    98  unsigned long base_pfn = cma->base_pfn, pfn = base_pfn; in cma_activate_area() local
    112  base_pfn = pfn; in cma_activate_area()
    124  init_cma_reserved_pageblock(pfn_to_page(base_pfn)); in cma_activate_area()
    206  cma->base_pfn = PFN_DOWN(base); in cma_init_reserved_mem()
    456  pfn = cma->base_pfn in cma_alloc()
    [all...]
cma.h
    8  unsigned long base_pfn; member

cma_debug.c
    174  debugfs_create_file("base_pfn", 0444, tmp, in cma_debugfs_add_one()
    175  &cma->base_pfn, &cma_debugfs_fops); in cma_debugfs_add_one()
sparse-vmemmap.c
    92  return altmap->base_pfn + altmap->reserve + altmap->alloc in vmem_altmap_next_pfn()
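
The (truncated) match above is the altmap allocation cursor: the next free PFN for vmemmap storage is the base of the device mapping plus the reserved area plus what has already been handed out; the kernel's version also accounts for padding from aligned allocations, which the match line cuts off. A minimal sketch with a simplified bookkeeping struct (altmap_like is an illustrative name, not struct vmem_altmap):

    /* Simplified altmap bookkeeping (illustration only, not struct vmem_altmap). */
    struct altmap_like {
        unsigned long base_pfn;  /* first PFN of the device mapping */
        unsigned long reserve;   /* PFNs set aside and never allocated */
        unsigned long free;      /* PFNs available for struct page storage */
        unsigned long align;     /* PFNs lost so far to aligned allocations */
        unsigned long alloc;     /* PFNs already allocated */
    };

    /* Next PFN the altmap allocator would hand out, mirroring the shape of
     * vmem_altmap_next_pfn(). */
    static unsigned long altmap_next_pfn(const struct altmap_like *a)
    {
        return a->base_pfn + a->reserve + a->alloc + a->align;
    }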
/kernel/linux/linux-5.10/arch/powerpc/mm/
init_64.c
    194  if (start_pfn < altmap->base_pfn) in altmap_cross_boundary()
    311  unsigned long base_pfn; in vmemmap_free() local
    315  alt_start = altmap->base_pfn; in vmemmap_free()
    316  alt_end = altmap->base_pfn + altmap->reserve + altmap->free; in vmemmap_free()
    340  base_pfn = PHYS_PFN(addr); in vmemmap_free()
    342  if (base_pfn >= alt_start && base_pfn < alt_end) { in vmemmap_free()
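
These matches compute the half-open PFN range the altmap backs, [base_pfn, base_pfn + reserve + free), and test whether a vmemmap page falls inside it, which decides how vmemmap_free() releases it. A small hedged helper expressing that containment test (pfn_backed_by_altmap is an illustrative name):

    #include <stdbool.h>

    /* True if pfn lies in the half-open altmap-backed range
     * [base_pfn, base_pfn + reserve + free_pfns), as tested in vmemmap_free(). */
    static bool pfn_backed_by_altmap(unsigned long pfn, unsigned long base_pfn,
                                     unsigned long reserve, unsigned long free_pfns)
    {
        unsigned long alt_start = base_pfn;
        unsigned long alt_end = base_pfn + reserve + free_pfns;

        return pfn >= alt_start && pfn < alt_end;
    }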
/kernel/linux/linux-6.6/arch/x86/hyperv/
hv_proc.c
    33  u64 base_pfn; in hv_call_deposit_pages() local
    88  base_pfn = page_to_pfn(pages[i]); in hv_call_deposit_pages()
    90  input_page->gpa_page_list[page_count] = base_pfn + j; in hv_call_deposit_pages()
    106  base_pfn = page_to_pfn(pages[i]); in hv_call_deposit_pages()
    108  __free_page(pfn_to_page(base_pfn + j)); in hv_call_deposit_pages()
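
hv_call_deposit_pages() allocates compound pages and then reports every constituent page to the hypervisor by listing base_pfn + j for each page of each allocation (and walks the same PFNs again on the error path to free them). A sketch of that expansion; expand_pfn_list is a hypothetical helper, and the real code obtains PFNs with page_to_pfn() and writes them into a hypercall input page rather than a plain array:

    #include <stddef.h>
    #include <stdint.h>

    /* Expand an array of compound-page base PFNs into a flat list of per-page
     * PFNs (base_pfn + j for each of the 2^order pages), the way
     * hv_call_deposit_pages() fills gpa_page_list[]. Returns entries written. */
    static size_t expand_pfn_list(const uint64_t *base_pfns, size_t nr_allocs,
                                  unsigned int order, uint64_t *out, size_t out_cap)
    {
        size_t count = 0;

        for (size_t i = 0; i < nr_allocs; i++)
            for (uint64_t j = 0; j < (1ULL << order) && count < out_cap; j++)
                out[count++] = base_pfns[i] + j;

        return count;
    }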
/kernel/linux/linux-5.10/arch/powerpc/kvm/
book3s_hv_uvmem.c
    227  unsigned long base_pfn; member
    259  p->base_pfn = slot->base_gfn; in kvmppc_uvmem_slot_init()
    277  if (p->base_pfn == slot->base_gfn) { in kvmppc_uvmem_slot_free()
    293  if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_mark_gfn()
    294  unsigned long index = gfn - p->base_pfn; in kvmppc_mark_gfn()
    337  if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_gfn_is_uvmem_pfn()
    338  unsigned long index = gfn - p->base_pfn; in kvmppc_gfn_is_uvmem_pfn()
    367  if (*gfn >= iter->base_pfn in kvmppc_next_nontransitioned_gfn()
    [all...]
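
The uvmem code keeps one record per memslot, keyed by the slot's first guest frame number, and resolves a gfn by finding the record whose [base_pfn, base_pfn + nr_pfns) range contains it, then indexing a per-frame array with gfn - base_pfn. A simplified sketch of that lookup (uvmem_slot_like and gfn_state are illustrative names, and the kernel keeps the records on a list rather than in an array):

    #include <stddef.h>

    /* Simplified per-memslot record (illustration only, not the kernel struct). */
    struct uvmem_slot_like {
        unsigned long base_pfn;  /* first guest frame number of the slot */
        unsigned long nr_pfns;   /* number of frames covered */
        unsigned long *pfns;     /* per-frame state, indexed by gfn - base_pfn */
    };

    /* Find the per-frame state word for a guest frame number by locating the
     * slot whose [base_pfn, base_pfn + nr_pfns) range contains it, the same
     * containment test used in kvmppc_mark_gfn() and kvmppc_gfn_is_uvmem_pfn(). */
    static unsigned long *gfn_state(struct uvmem_slot_like *slots, size_t nr_slots,
                                    unsigned long gfn)
    {
        for (size_t i = 0; i < nr_slots; i++) {
            struct uvmem_slot_like *p = &slots[i];

            if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns)
                return &p->pfns[gfn - p->base_pfn];
        }
        return NULL; /* gfn not covered by any slot */
    }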
/kernel/linux/linux-6.6/arch/powerpc/kvm/
book3s_hv_uvmem.c
    229  unsigned long base_pfn; member
    261  p->base_pfn = slot->base_gfn; in kvmppc_uvmem_slot_init()
    279  if (p->base_pfn == slot->base_gfn) { in kvmppc_uvmem_slot_free()
    295  if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_mark_gfn()
    296  unsigned long index = gfn - p->base_pfn; in kvmppc_mark_gfn()
    339  if (gfn >= p->base_pfn && gfn < p->base_pfn + p->nr_pfns) { in kvmppc_gfn_is_uvmem_pfn()
    340  unsigned long index = gfn - p->base_pfn; in kvmppc_gfn_is_uvmem_pfn()
    369  if (*gfn >= iter->base_pfn in kvmppc_next_nontransitioned_gfn()
    [all...]
/kernel/linux/linux-6.6/arch/powerpc/mm/
init_64.c
    196  if (start_pfn < altmap->base_pfn) in altmap_cross_boundary()
    325  unsigned long base_pfn; in __vmemmap_free() local
    329  alt_start = altmap->base_pfn; in __vmemmap_free()
    330  alt_end = altmap->base_pfn + altmap->reserve + altmap->free; in __vmemmap_free()
    354  base_pfn = PHYS_PFN(addr); in __vmemmap_free()
    356  if (base_pfn >= alt_start && base_pfn < alt_end) { in __vmemmap_free()

/kernel/linux/linux-5.10/arch/x86/kernel/cpu/mtrr/
cleanup.c
    37  unsigned long base_pfn; member
    77  base = range_state[i].base_pfn; in x86_get_mtrr_mem_range()
    98  base = range_state[i].base_pfn; in x86_get_mtrr_mem_range()
    207  range_state[reg].base_pfn = basek >> (PAGE_SHIFT - 10); in save_var_mtrr()
    219  basek = range_state[reg].base_pfn << (PAGE_SHIFT - 10); in set_var_mtrr_all()
    409  set_var_mtrr_range(struct var_mtrr_state *state, unsigned long base_pfn, in set_var_mtrr_range() argument
    418  basek = base_pfn << (PAGE_SHIFT - 10); in set_var_mtrr_range()
    541  start_base = range_state[i].base_pfn << (PAGE_SHIFT - 10); in print_out_mtrr_range_state()
    704  range_state[i].base_pfn = base; in mtrr_cleanup()
    901  range_state[i].base_pfn in mtrr_trim_uncached_memory()
    [all...]
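
The MTRR cleanup code stores ranges as PFNs while much of the legacy interface works in KiB, so it converts back and forth with shifts by (PAGE_SHIFT - 10): a page is 2^PAGE_SHIFT bytes and a KiB is 2^10 bytes. A small worked example, assuming 4 KiB pages (pfn_to_kib and kib_to_pfn are illustrative names):

    #include <stdio.h>

    #define PAGE_SHIFT 12  /* assumption: x86 with 4 KiB pages */

    /* Converting a PFN to a size or address in KiB shifts by (PAGE_SHIFT - 10),
     * exactly the expressions seen in save_var_mtrr() and set_var_mtrr_all(). */
    static unsigned long pfn_to_kib(unsigned long pfn)
    {
        return pfn << (PAGE_SHIFT - 10);
    }

    static unsigned long kib_to_pfn(unsigned long kib)
    {
        return kib >> (PAGE_SHIFT - 10);
    }

    int main(void)
    {
        unsigned long pfn = 0x100000; /* the 4 GiB boundary with 4 KiB pages */

        printf("pfn 0x%lx -> %lu KiB\n", pfn, pfn_to_kib(pfn)); /* 4194304 KiB */
        printf("round trip pfn: 0x%lx\n", kib_to_pfn(pfn_to_kib(pfn)));
        return 0;
    }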
/kernel/linux/linux-6.6/arch/x86/kernel/cpu/mtrr/
cleanup.c
    37  unsigned long base_pfn; member
    74  base = range_state[i].base_pfn; in x86_get_mtrr_mem_range()
    94  base = range_state[i].base_pfn; in x86_get_mtrr_mem_range()
    194  range_state[reg].base_pfn = basek >> (PAGE_SHIFT - 10); in save_var_mtrr()
    206  basek = range_state[reg].base_pfn << (PAGE_SHIFT - 10); in set_var_mtrr_all()
    396  set_var_mtrr_range(struct var_mtrr_state *state, unsigned long base_pfn, in set_var_mtrr_range() argument
    405  basek = base_pfn << (PAGE_SHIFT - 10); in set_var_mtrr_range()
    528  start_base = range_state[i].base_pfn << (PAGE_SHIFT - 10); in print_out_mtrr_range_state()
    694  range_state[i].base_pfn = base; in mtrr_cleanup()
    894  range_state[i].base_pfn in mtrr_trim_uncached_memory()
    [all...]
/kernel/linux/linux-5.10/drivers/staging/media/ipu3/
ipu3-dmamap.c
    233  unsigned long order, base_pfn; in imgu_dmamap_init() local
    240  base_pfn = max_t(unsigned long, 1, imgu->mmu->aperture_start >> order); in imgu_dmamap_init()
    241  init_iova_domain(&imgu->iova_domain, 1UL << order, base_pfn); in imgu_dmamap_init()

/kernel/linux/linux-6.6/drivers/staging/media/ipu3/
ipu3-dmamap.c
    233  unsigned long order, base_pfn; in imgu_dmamap_init() local
    240  base_pfn = max_t(unsigned long, 1, imgu->mmu->aperture_start >> order); in imgu_dmamap_init()
    241  init_iova_domain(&imgu->iova_domain, 1UL << order, base_pfn); in imgu_dmamap_init()

/kernel/linux/linux-5.10/drivers/nvdimm/
pfn_devs.c
    659  unsigned long base_pfn = PHYS_PFN(base); in init_altmap_base() local
    661  return SUBSECTION_ALIGN_DOWN(base_pfn); in init_altmap_base()
    667  unsigned long base_pfn = PHYS_PFN(base); in init_altmap_reserve() local
    669  reserve += base_pfn - SUBSECTION_ALIGN_DOWN(base_pfn); in init_altmap_reserve()
    687  .base_pfn = init_altmap_base(base), in __nvdimm_setup_pfn()
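
For a pmem namespace the altmap base is rounded down to a subsection boundary, and the PFNs introduced by that rounding are added to the reserve so the allocator never hands them out. A sketch of the two calculations, assuming 4 KiB pages and 2 MiB subsections; the macro values and helper names below are assumptions for the sketch, not copied from the kernel headers:

    /* Assumptions for the sketch: 4 KiB pages, 2 MiB subsections (512 PFNs). */
    #define PAGE_SHIFT        12
    #define SUBSECTION_SHIFT  21
    #define PFN_SUBSECTION_MASK (~((1UL << (SUBSECTION_SHIFT - PAGE_SHIFT)) - 1))

    #define PHYS_PFN(x)                ((unsigned long)((x) >> PAGE_SHIFT))
    #define SUBSECTION_ALIGN_DOWN(pfn) ((pfn) & PFN_SUBSECTION_MASK)

    /* Altmap base PFN: the namespace base rounded down to a subsection,
     * in the spirit of init_altmap_base(). */
    static unsigned long altmap_base(unsigned long long base)
    {
        return SUBSECTION_ALIGN_DOWN(PHYS_PFN(base));
    }

    /* Altmap reserve: the requested reserve plus the PFNs introduced by the
     * rounding above, so the altmap never allocates below the real base,
     * in the spirit of init_altmap_reserve(). */
    static unsigned long altmap_reserve(unsigned long long base, unsigned long reserve)
    {
        unsigned long base_pfn = PHYS_PFN(base);

        return reserve + (base_pfn - SUBSECTION_ALIGN_DOWN(base_pfn));
    }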
/kernel/linux/linux-6.6/drivers/nvdimm/
pfn_devs.c
    668  unsigned long base_pfn = PHYS_PFN(base); in init_altmap_base() local
    670  return SUBSECTION_ALIGN_DOWN(base_pfn); in init_altmap_base()
    676  unsigned long base_pfn = PHYS_PFN(base); in init_altmap_reserve() local
    678  reserve += base_pfn - SUBSECTION_ALIGN_DOWN(base_pfn); in init_altmap_reserve()
    696  .base_pfn = init_altmap_base(base), in __nvdimm_setup_pfn()

/kernel/linux/linux-5.10/include/linux/
memremap.h
    13  * @base_pfn: base of the entire dev_pagemap mapping
    20  const unsigned long base_pfn; member
/kernel/linux/linux-5.10/drivers/iommu/
dma-iommu.c
    307  unsigned long order, base_pfn; in iommu_dma_init_domain() local
    318  base_pfn = max_t(unsigned long, 1, base >> order); in iommu_dma_init_domain()
    328  base_pfn = max_t(unsigned long, base_pfn, in iommu_dma_init_domain()
    335  base_pfn != iovad->start_pfn) { in iommu_dma_init_domain()
    343  init_iova_domain(iovad, 1UL << order, base_pfn); in iommu_dma_init_domain()
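
iommu_dma_init_domain() (and the ipu3 driver above) picks the IOVA granule as the smallest IOMMU page size (1 << order) and the domain's lowest PFN as at least 1, so IOVA 0 is never allocated, at least the DMA window base shifted by order, and not below the hardware aperture; both values are then passed to init_iova_domain(). A hedged sketch of the base_pfn selection only (iova_base_pfn is an illustrative name and the clamping is simplified):

    #include <stdio.h>

    /* Pick the lowest IOVA page-frame number for a new IOVA domain, in the
     * spirit of the max_t() chain in iommu_dma_init_domain(). 'order' is
     * log2 of the IOVA granule, the smallest IOMMU page size. */
    static unsigned long iova_base_pfn(unsigned long long dma_base,
                                       unsigned long long aperture_start,
                                       unsigned int order)
    {
        unsigned long base_pfn = 1; /* never hand out IOVA 0 */

        if ((dma_base >> order) > base_pfn)
            base_pfn = dma_base >> order;
        if ((aperture_start >> order) > base_pfn)
            base_pfn = aperture_start >> order;

        return base_pfn;
    }

    int main(void)
    {
        /* Example: 4 KiB IOVA granule (order 12), DMA window starting at 1 MiB. */
        printf("base_pfn = 0x%lx\n", iova_base_pfn(0x100000, 0, 12)); /* 0x100 */
        return 0;
    }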
/kernel/linux/linux-6.6/include/linux/
memremap.h
    15  * @base_pfn: base of the entire dev_pagemap mapping
    22  unsigned long base_pfn; member

/kernel/linux/linux-6.6/arch/powerpc/mm/book3s64/
radix_pgtable.c
    698  unsigned long base_pfn = page_to_pfn(page); in free_vmemmap_pages() local
    705  alt_start = altmap->base_pfn; in free_vmemmap_pages()
    706  alt_end = altmap->base_pfn + altmap->reserve + altmap->free; in free_vmemmap_pages()
    708  if (base_pfn >= alt_start && base_pfn < alt_end) { in free_vmemmap_pages()
/kernel/linux/linux-5.10/arch/x86/mm/
numa_emulation.c
    204  unsigned long base_pfn = PHYS_PFN(base); in uniform_size() local
    207  return PFN_PHYS((max_pfn - base_pfn - hole_pfns) / nr_nodes); in uniform_size()
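
uniform_size() splits the usable memory above 'base' evenly across the emulated nodes: subtract the base PFN and the hole PFNs from max_pfn, divide by the node count, and convert the per-node page count back to bytes with PFN_PHYS(). A worked example, assuming 4 KiB pages (uniform_node_size is an illustrative name):

    #include <stdio.h>

    #define PAGE_SHIFT 12  /* assumption: 4 KiB pages */
    #define PHYS_PFN(x) ((unsigned long)((x) >> PAGE_SHIFT))
    #define PFN_PHYS(x) ((unsigned long long)(x) << PAGE_SHIFT)

    /* Size in bytes of each emulated NUMA node: pages above 'base' that are
     * not holes, split evenly across nr_nodes, as in uniform_size(). */
    static unsigned long long uniform_node_size(unsigned long max_pfn,
                                                unsigned long long base,
                                                unsigned long hole_pfns,
                                                unsigned int nr_nodes)
    {
        unsigned long base_pfn = PHYS_PFN(base);

        return PFN_PHYS((max_pfn - base_pfn - hole_pfns) / nr_nodes);
    }

    int main(void)
    {
        /* Example: 16 GiB of RAM (max_pfn = 4M pages), no holes, 4 nodes. */
        printf("%llu bytes per node\n",
               uniform_node_size(4UL << 20, 0, 0, 4)); /* 4 GiB each */
        return 0;
    }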
/kernel/linux/linux-6.6/arch/x86/mm/
numa_emulation.c
    204  unsigned long base_pfn = PHYS_PFN(base); in uniform_size() local
    207  return PFN_PHYS((max_pfn - base_pfn - hole_pfns) / nr_nodes); in uniform_size()