Lines Matching refs:start
283 * Decide whether we really walk over the current vma on [@start, @end)
288 static int walk_page_test(unsigned long start, unsigned long end,
295 return ops->test_walk(start, end, walk);
308 err = ops->pte_hole(start, end, -1, walk);
314 static int __walk_page_range(unsigned long start, unsigned long end,
322 err = ops->pre_vma(start, end, walk);
329 err = walk_hugetlb_range(start, end, walk);
331 err = walk_pgd_range(start, end, walk);
342 * @start: start address of the virtual address range
348 * within the virtual address range [@start, @end). During walking, we can do
379 int walk_page_range(struct mm_struct *mm, unsigned long start,
392 if (start >= end)
400 vma = find_vma(walk.mm, start);
406 err = ops->pte_hole(start, next, -1, &walk);
407 } else if (start < vma->vm_start) { /* outside vma */
411 err = ops->pte_hole(start, next, -1, &walk);
417 err = walk_page_test(start, next, &walk);
429 err = __walk_page_range(start, next, &walk);
433 } while (start = next, start < end);
443 int walk_page_range_novma(struct mm_struct *mm, unsigned long start,
456 if (start >= end || !walk.mm)
461 return walk_pgd_range(start, end, &walk);
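The matches above are from the kernel's generic page table walker (mm/pagewalk.c). The first group, walk_page_test() at lines 283-308, decides whether the current VMA is walked at all: if the caller supplied a test_walk hook it gets the final say, and skipped or hole ranges can still be reported to the caller through pte_hole with depth -1. By convention test_walk returns 0 to walk the VMA, 1 to skip it, and a negative errno to abort the walk. Below is a minimal sketch of such a pair of callbacks, assuming the mm_walk_ops interface from <linux/pagewalk.h>; the names and the VM_LOCKED policy are invented for the example, not taken from this listing.

/*
 * Illustrative sketch only: a test_walk callback following the
 * 0 = walk / 1 = skip / negative = abort convention, plus a pte_hole
 * callback that counts how many bytes had no mapping.
 */
#include <linux/pagewalk.h>
#include <linux/mm.h>

static int only_locked_test_walk(unsigned long start, unsigned long end,
				 struct mm_walk *walk)
{
	/* Skip every vma that is not mlock()ed; keep walking the rest. */
	return (walk->vma->vm_flags & VM_LOCKED) ? 0 : 1;
}

static int count_hole(unsigned long start, unsigned long end, int depth,
		      struct mm_walk *walk)
{
	/* depth is -1 when the hole is not tied to a page table level,
	 * as in the calls shown in the listing above. */
	unsigned long *hole_bytes = walk->private;

	*hole_bytes += end - start;
	return 0;
}

static const struct mm_walk_ops only_locked_ops = {
	.test_walk	= only_locked_test_walk,
	.pte_hole	= count_hole,
};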
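walk_page_range() itself, lines 342-433, then loops VMA by VMA over [start, end): gaps with no VMA are reported through pte_hole, the parts that survive walk_page_test() are handed to __walk_page_range(), and the loop advances with start = next until the end of the range. A minimal caller sketch under the same assumptions follows; count_present_pte(), count_present_ops and count_present_pages() are invented names, and the exact mmap-lock helper that must be held around the call depends on the kernel version.

/*
 * Illustrative sketch only: count the present PTEs in [start, end) of a
 * process address space with walk_page_range().
 */
#include <linux/pagewalk.h>
#include <linux/mm.h>

static int count_present_pte(pte_t *pte, unsigned long addr,
			     unsigned long next, struct mm_walk *walk)
{
	unsigned long *present = walk->private;

	if (pte_present(*pte))
		(*present)++;
	return 0;		/* non-zero would stop the walk */
}

static const struct mm_walk_ops count_present_ops = {
	.pte_entry = count_present_pte,
};

static unsigned long count_present_pages(struct mm_struct *mm,
					 unsigned long start,
					 unsigned long end)
{
	unsigned long present = 0;

	/* The walk expects the caller to hold the mmap lock. */
	mmap_read_lock(mm);
	walk_page_range(mm, start, end, &count_present_ops, &present);
	mmap_read_unlock(mm);

	return present;
}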
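The last group, walk_page_range_novma() at lines 443-461, is the variant for page tables that are not described by VMAs: after the same sanity check on the range it goes straight to walk_pgd_range(), skipping all of the per-VMA filtering above, which is what lets callers such as kernel page-table dumpers reuse the same mm_walk_ops callbacks.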