Lines matching refs: end_pfn. Each entry below gives the source line number followed by the matching line of code.
441 defer_init(int nid, unsigned long pfn, unsigned long end_pfn)
449 if (prev_end_pfn != end_pfn) {
450 prev_end_pfn = end_pfn;
455 if (end_pfn < pgdat_end_pfn(NODE_DATA(nid)))
480 static inline bool defer_init(int nid, unsigned long pfn, unsigned long end_pfn)
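
The hits above are the CONFIG_DEFERRED_STRUCT_PAGE_INIT definition of defer_init() (441-455) and its always-false stub (480). The real function postpones struct page initialisation for most of a node's memory until the deferred-init kthreads run, but never defers low zones (455) and initialises a bounded budget of pages eagerly so boot can proceed. A minimal userspace sketch of that decision; node_end_pfn and init_budget are invented stand-ins for pgdat_end_pfn() and the per-node section budget, and the real code additionally tracks section alignment of pfn:

    #include <stdbool.h>

    /* Invented stand-ins for kernel state; not the real kernel API. */
    static unsigned long node_end_pfn;   /* would come from pgdat_end_pfn() */
    static unsigned long init_budget;    /* pages still initialised eagerly */

    /* Sketch of the defer_init() decision under the assumptions above. */
    static bool defer_init_model(unsigned long pfn, unsigned long end_pfn)
    {
        (void)pfn;  /* the real code also checks pfn's section alignment */
        if (end_pfn < node_end_pfn)  /* low zone: always populate early */
            return false;
        if (init_budget > 0) {       /* eager budget not yet spent */
            init_budget--;
            return false;
        }
        return true;                 /* leave this page for deferred init */
    }
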
1500 unsigned long end_pfn = PFN_UP(end);
1502 for (; start_pfn < end_pfn; start_pfn++) {
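
The hit at 1500 turns a byte address into an exclusive end pfn with PFN_UP(), pairing with a rounded-down start pfn so that a partially covered last page is still included; the loop at 1502 then walks the half-open range. The rounding in isolation, as a standalone program assuming the common 4 KiB page size:

    #include <stdio.h>

    #define PAGE_SHIFT 12                 /* assume 4 KiB pages */
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PFN_DOWN(x) ((x) >> PAGE_SHIFT)
    #define PFN_UP(x)   (((x) + PAGE_SIZE - 1) >> PAGE_SHIFT)

    int main(void)
    {
        unsigned long start = 0x1080;     /* starts mid-page */
        unsigned long end   = 0x3010;     /* ends mid-page */

        /* start rounds down, end rounds up: every byte is covered */
        printf("pfns [%lu, %lu)\n", PFN_DOWN(start), PFN_UP(end));
        return 0;
    }
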
1580 unsigned long start_pfn, end_pfn;
1586 nid = memblock_search_pfn_nid(pfn, &start_pfn, &end_pfn);
1589 state->last_end = end_pfn;
1622 * [start_pfn, end_pfn) is valid and within the same zone, before scanning it
1638 unsigned long end_pfn, struct zone *zone)
1643 /* end_pfn is one past the range we are checking */
1644 end_pfn--;
1646 if (!pfn_valid(start_pfn) || !pfn_valid(end_pfn))
1656 end_page = pfn_to_page(end_pfn);
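
Hits 1622-1656 come from the check that a [start_pfn, end_pfn) interval is valid and stays within one zone before it is scanned. Note the idiom at 1643-1644: because the end is one past the range (half-open), it is decremented once so that pfn_valid() and pfn_to_page() probe the last pfn actually inside the interval. The same convention in a self-contained form, with pfn_valid_model() as a made-up substitute for the real pfn_valid():

    #include <stdbool.h>
    #include <stdio.h>

    /* Toy validity window standing in for pfn_valid(); purely illustrative. */
    static bool pfn_valid_model(unsigned long pfn)
    {
        return pfn >= 100 && pfn < 200;
    }

    /*
     * Ranges are half-open: end_pfn is one past the last page, so the
     * boundary test decrements it before probing both ends.
     */
    static bool range_ends_valid(unsigned long start_pfn, unsigned long end_pfn)
    {
        if (start_pfn >= end_pfn)            /* empty range */
            return false;
        end_pfn--;                           /* last pfn inside the range */
        return pfn_valid_model(start_pfn) && pfn_valid_model(end_pfn);
    }

    int main(void)
    {
        /* prints "1 0": [100, 200) fits the window, [100, 201) does not */
        printf("%d %d\n", range_ends_valid(100, 200), range_ends_valid(100, 201));
        return 0;
    }
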
1753 unsigned long end_pfn)
1758 for (; pfn < end_pfn; pfn++) {
1780 unsigned long end_pfn)
1788 for (; pfn < end_pfn; pfn++) {
1845 unsigned long *end_pfn)
1848 unsigned long spfn = *start_pfn, epfn = *end_pfn;
1853 for_each_free_mem_pfn_range_in_zone_from(j, zone, start_pfn, end_pfn) {
1859 t = min(mo_pfn, *end_pfn);
1862 if (mo_pfn < *end_pfn) {
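
deferred_init_maxorder() (hits 1845-1862) initialises pages one MAX_ORDER-aligned chunk at a time: mo_pfn is the first MAX_ORDER boundary strictly above *start_pfn, each free range is cut at min(mo_pfn, *end_pfn) (1859), and if the boundary falls inside the range (1862) the remainder is left for the next pass. The splitting arithmetic on its own, with an illustrative chunk size:

    #include <stdio.h>

    #define MAX_ORDER_NR_PAGES 1024UL   /* illustrative value only */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned long start_pfn = 1000, end_pfn = 5000;
        /* first MAX_ORDER boundary strictly above start_pfn */
        unsigned long mo_pfn = ALIGN(start_pfn + 1, MAX_ORDER_NR_PAGES);
        /* this pass stops at the boundary; the rest waits for the next one */
        unsigned long t = mo_pfn < end_pfn ? mo_pfn : end_pfn;

        /* prints: init [1000, 1024), defer [1024, 5000) */
        printf("init [%lu, %lu), defer [%lu, %lu)\n", start_pfn, t, t, end_pfn);
        return 0;
    }
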
1888 deferred_init_memmap_chunk(unsigned long start_pfn, unsigned long end_pfn,
1901 while (spfn < end_pfn) {
2373 unsigned long start_pfn, unsigned long end_pfn,
2381 for (pfn = start_pfn; pfn <= end_pfn;) {
2417 unsigned long start_pfn, end_pfn, pfn;
2424 end_pfn = start_pfn + pageblock_nr_pages - 1;
2429 if (!zone_spans_pfn(zone, end_pfn))
2432 return move_freepages(zone, start_pfn, end_pfn, migratetype,
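
Hits 2417-2432 show how a whole pageblock is derived from one page: the pfn is masked down to a pageblock boundary, the end is set to the last pfn of the same block (an inclusive end, which is why the move_freepages() loop at 2381 uses pfn <= end_pfn), and the block is clamped or abandoned at 2429 when it would cross a zone boundary. The alignment arithmetic by itself, assuming a pageblock of 512 pages for illustration:

    #include <stdio.h>

    #define pageblock_nr_pages 512UL    /* illustrative value only */

    int main(void)
    {
        unsigned long pfn = 1234;
        /* mask down to the pageblock containing pfn */
        unsigned long start_pfn = pfn & ~(pageblock_nr_pages - 1);
        /* inclusive end: last pfn inside the same pageblock */
        unsigned long end_pfn = start_pfn + pageblock_nr_pages - 1;

        /* prints: pfn 1234 -> pageblock [1024, 1535] */
        printf("pfn %lu -> pageblock [%lu, %lu]\n", pfn, start_pfn, end_pfn);
        return 0;
    }
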
6216 unsigned long pfn, end_pfn = start_pfn + size;
6219 if (highest_memmap_pfn < end_pfn - 1)
6220 highest_memmap_pfn = end_pfn - 1;
6236 end_pfn = altmap->base_pfn + vmem_altmap_offset(altmap);
6240 for (pfn = start_pfn; pfn < end_pfn; ) {
6276 unsigned long pfn, end_pfn = start_pfn + nr_pages;
6293 nr_pages = end_pfn - start_pfn;
6296 for (pfn = start_pfn; pfn < end_pfn; pfn++) {
6403 unsigned long end_pfn,
6411 end_pfn = clamp(end_pfn, zone_start_pfn, zone_end_pfn);
6413 if (start_pfn >= end_pfn)
6416 memmap_init_zone(end_pfn - start_pfn, nid, zone_id, start_pfn,
6422 *hole_pfn = end_pfn;
6427 unsigned long start_pfn, end_pfn;
6431 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
6440 memmap_init_zone_range(zone, start_pfn, end_pfn,
6456 end_pfn = round_up(end_pfn, PAGES_PER_SECTION);
6457 if (hole_pfn < end_pfn)
6459 init_unavailable_range(hole_pfn, end_pfn, zone_id, nid);
6683 * @end_pfn: Passed by reference. On return, it will have the node end_pfn.
6691 unsigned long *start_pfn, unsigned long *end_pfn)
6697 *end_pfn = 0;
6701 *end_pfn = max(*end_pfn, this_end_pfn);
6811 unsigned long start_pfn, end_pfn;
6814 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) {
6816 end_pfn = clamp(end_pfn, range_start_pfn, range_end_pfn);
6817 nr_absent -= end_pfn - start_pfn;
6825 * @end_pfn: The end PFN to stop searching for holes
6830 unsigned long end_pfn)
6832 return __absent_pages_in_range(MAX_NUMNODES, start_pfn, end_pfn);
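
__absent_pages_in_range() (hits 6811-6817, wrapped by absent_pages_in_range() at 6830-6832) counts holes by subtraction: start with the full width of the queried window, clamp every memblock range to the window, and subtract the clamped lengths; whatever remains is pages with no backing memory. The same accounting as a standalone program over an invented memory map:

    #include <stdio.h>

    #define clamp(v, lo, hi) ((v) < (lo) ? (lo) : (v) > (hi) ? (hi) : (v))

    struct range { unsigned long start, end; };   /* half-open [start, end) */

    /* Invented map; the kernel iterates memblock via for_each_mem_pfn_range() */
    static const struct range map[] = { {0, 100}, {150, 300}, {400, 450} };

    static unsigned long absent_pages(unsigned long win_start,
                                      unsigned long win_end)
    {
        unsigned long nr_absent = win_end - win_start;

        for (unsigned int i = 0; i < sizeof(map) / sizeof(map[0]); i++) {
            unsigned long s = clamp(map[i].start, win_start, win_end);
            unsigned long e = clamp(map[i].end, win_start, win_end);
            nr_absent -= e - s;   /* present pages drop out of the count */
        }
        return nr_absent;
    }

    int main(void)
    {
        /* window [50, 420): holes are [100, 150) and [300, 400) -> 150 pages */
        printf("%lu absent pages\n", absent_pages(50, 420));
        return 0;
    }
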
6864 unsigned long start_pfn, end_pfn;
6870 end_pfn = clamp(memblock_region_memory_end_pfn(r),
6875 nr_absent += end_pfn - start_pfn;
6879 nr_absent += end_pfn - start_pfn;
7238 unsigned long end_pfn = 0;
7243 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn);
7251 end_pfn ? ((u64)end_pfn << PAGE_SHIFT) - 1 : 0);
7252 calculate_node_totalpages(pgdat, start_pfn, end_pfn);
7347 unsigned long start_pfn, end_pfn;
7350 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
7351 unsigned long pages = end_pfn - start_pfn;
7478 unsigned long start_pfn, end_pfn;
7496 for_each_mem_pfn_range(i, nid, &start_pfn, &end_pfn, NULL) {
7500 if (start_pfn >= end_pfn)
7506 kernel_pages = min(end_pfn, usable_startpfn)
7515 if (end_pfn <= usable_startpfn) {
7523 zone_movable_pfn[nid] = end_pfn;
7531 * start_pfn->end_pfn. Calculate size_pages as the
7534 size_pages = end_pfn - start_pfn;
7565 unsigned long start_pfn, end_pfn;
7570 get_pfn_range_for_nid(nid, &start_pfn, &end_pfn);
7571 if (zone_movable_pfn[nid] >= end_pfn)
7621 unsigned long start_pfn, end_pfn;
7643 end_pfn = max(max_zone_pfn[zone], start_pfn);
7645 arch_zone_highest_possible_pfn[zone] = end_pfn;
7647 start_pfn = end_pfn;
7685 for_each_mem_pfn_range(i, MAX_NUMNODES, &start_pfn, &end_pfn, &nid) {
7688 ((u64)end_pfn << PAGE_SHIFT) - 1);
7689 subsection_map_init(start_pfn, end_pfn - start_pfn);
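
Hits 7621-7647 (and the per-range loop at 7685-7689) are from free_area_init(): each zone's lowest possible pfn is wherever the previous zone ended, max() at 7643 keeps a zone from ending below its own start (it becomes empty rather than inverted), and start_pfn = end_pfn at 7647 hands the baton to the next zone. A compact model of that cascade; the zone count and max_zone_pfn values are invented, and the real loop additionally skips ZONE_MOVABLE:

    #include <stdio.h>

    #define NR_ZONES 3   /* invented: a DMA-like, a NORMAL-like, a HIGH-like zone */

    int main(void)
    {
        unsigned long max_zone_pfn[NR_ZONES] = { 4096, 1048576, 1048576 };
        unsigned long lo[NR_ZONES], hi[NR_ZONES];
        unsigned long start_pfn = 0, end_pfn;

        for (int zone = 0; zone < NR_ZONES; zone++) {
            /* a limit below the current start yields an empty zone */
            end_pfn = max_zone_pfn[zone] > start_pfn ? max_zone_pfn[zone]
                                                     : start_pfn;
            lo[zone] = start_pfn;
            hi[zone] = end_pfn;
            start_pfn = end_pfn;   /* next zone begins where this one ends */
        }

        for (int zone = 0; zone < NR_ZONES; zone++)
            printf("zone %d: [%lu, %lu)%s\n", zone, lo[zone], hi[zone],
                   lo[zone] == hi[zone] ? " (empty)" : "");
        return 0;
    }
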
8794 unsigned long end_pfn = start_pfn + nr_pages;
8796 return alloc_contig_range(start_pfn, end_pfn, MIGRATE_MOVABLE,
8803 unsigned long i, end_pfn = start_pfn + nr_pages;
8806 for (i = start_pfn; i < end_pfn; i++) {
8942 void __offline_isolated_pages(unsigned long start_pfn, unsigned long end_pfn)
8950 offline_mem_sections(pfn, end_pfn);
8953 while (pfn < end_pfn) {