Lines matching references to "end" (one match per line; the leading number is the source line):
42 unsigned long addr, unsigned long end, pgprot_t newprot,
190 } while (pte++, addr += PAGE_SIZE, addr != end);
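
Lines 42 and 190 appear to bracket the leaf-level walker (by the look of the signature, change_pte_range() in mm/mprotect.c): the loop advances the pte cursor and the address together, one PAGE_SIZE per step, and exits only when addr lands exactly on end. A minimal userspace model of that loop shape; the PAGE_SIZE value and the int cursor are stand-ins, not kernel types:

    #include <stdio.h>

    #define PAGE_SIZE 4096UL   /* stand-in; the kernel's value is per-arch */

    int main(void)
    {
        unsigned long addr = 0x1000, end = 0x5000;
        int pte = 0;           /* stand-in for the pte_t * cursor */

        /* Same shape as the loop on line 190: the comma expression
         * advances the table cursor and the address together, and
         * the loop exits only when addr reaches end exactly. */
        do {
            printf("visit pte[%d] for addr %#lx\n", pte, addr);
        } while (pte++, addr += PAGE_SIZE, addr != end);

        return 0;
    }
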
223 pud_t *pud, unsigned long addr, unsigned long end,
238 next = pmd_addr_end(addr, end);
256 vma, vma->vm_mm, addr, end);
284 } while (pmd++, addr = next, addr != end);
295 p4d_t *p4d, unsigned long addr, unsigned long end,
304 next = pud_addr_end(addr, end);
309 } while (pud++, addr = next, addr != end);
315 pgd_t *pgd, unsigned long addr, unsigned long end,
324 next = p4d_addr_end(addr, end);
329 } while (p4d++, addr = next, addr != end);
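
Lines 238, 304, and 324 all compute next with a per-level *_addr_end() helper before handing the sub-range down one level. The generic kernel macro clamps the next table boundary to end; subtracting 1 from both sides of the comparison keeps it correct when end sits at the very top of the address space and wraps to 0. A runnable userspace model (the 2 MiB PMD span is an assumption for the demo):

    #include <stdio.h>

    #define PMD_SIZE (1UL << 21)        /* 2 MiB span; an assumption for the demo */
    #define PMD_MASK (~(PMD_SIZE - 1))

    /* Userspace model of the kernel's generic pmd_addr_end(): the next
     * PMD boundary after addr, clamped to end. */
    static unsigned long pmd_addr_end_model(unsigned long addr, unsigned long end)
    {
        unsigned long boundary = (addr + PMD_SIZE) & PMD_MASK;

        return (boundary - 1 < end - 1) ? boundary : end;
    }

    int main(void)
    {
        unsigned long addr = 0x1ff000, end = 0x600000, next;

        /* Same shape as the mid-level loops on lines 238/304/324:
         * consume one table entry's worth of address space per pass,
         * never stepping past end. */
        do {
            next = pmd_addr_end_model(addr, end);
            printf("pmd entry covers [%#lx, %#lx)\n", addr, next);
        } while (addr = next, addr != end);

        return 0;
    }
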
335 unsigned long addr, unsigned long end, pgprot_t newprot,
344 BUG_ON(addr >= end);
346 flush_cache_range(vma, addr, end);
349 next = pgd_addr_end(addr, end);
354 } while (pgd++, addr = next, addr != end);
358 flush_tlb_range(vma, start, end);
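
Lines 344-358 show the top-level ordering in what looks like change_protection_range(): assert the range is non-empty, flush the cache for the whole range before rewriting entries (needed on virtually-tagged caches), walk the pgd entries, then flush the TLB for the whole range afterwards so stale translations are dropped. A sketch of that shape with stub flush hooks; the pgd span and the *_model names are stand-ins:

    #include <assert.h>
    #include <stdio.h>

    #define PGDIR_SIZE (1UL << 39)      /* x86-64 pgd span; an assumption */
    #define PGDIR_MASK (~(PGDIR_SIZE - 1))

    static unsigned long pgd_addr_end_model(unsigned long addr, unsigned long end)
    {
        unsigned long boundary = (addr + PGDIR_SIZE) & PGDIR_MASK;

        return (boundary - 1 < end - 1) ? boundary : end;
    }

    /* Stub hooks; the real flush_cache_range()/flush_tlb_range() take the vma. */
    static void flush_cache_range_model(unsigned long s, unsigned long e)
    {
        printf("cache flush [%#lx, %#lx)\n", s, e);
    }

    static void flush_tlb_range_model(unsigned long s, unsigned long e)
    {
        printf("tlb flush   [%#lx, %#lx)\n", s, e);
    }

    static void change_protection_range_model(unsigned long addr, unsigned long end)
    {
        unsigned long start = addr, next;

        assert(addr < end);                  /* the BUG_ON on line 344 */
        flush_cache_range_model(addr, end);  /* before rewriting entries, line 346 */

        do {                                 /* the pgd walk, lines 349-354 */
            next = pgd_addr_end_model(addr, end);
            printf("change pgd  [%#lx, %#lx)\n", addr, next);
        } while (addr = next, addr != end);

        flush_tlb_range_model(start, end);   /* afterwards, line 358 */
    }

    int main(void)
    {
        change_protection_range_model(0x7f0000000000UL, 0x7f8000001000UL);
        return 0;
    }
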
365 unsigned long end, pgprot_t newprot,
373 pages = hugetlb_change_protection(vma, start, end, newprot);
375 pages = change_protection_range(vma, start, end, newprot,
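
Lines 365-375 show the dispatch in change_protection(): hugetlb vmas take a dedicated path, everything else goes through the ordinary range walk, and both paths report how many pages they actually changed. A minimal sketch of that split; all names and page sizes here are stand-ins:

    #include <stdbool.h>
    #include <stdio.h>

    /* Stand-ins for the two paths; both report pages actually changed. */
    static long hugetlb_change_model(unsigned long s, unsigned long e)
    {
        return (e - s) >> 21;   /* pretend 2 MiB huge pages */
    }

    static long range_change_model(unsigned long s, unsigned long e)
    {
        return (e - s) >> 12;   /* pretend 4 KiB base pages */
    }

    static long change_protection_model(bool is_hugetlb,
                                        unsigned long start, unsigned long end)
    {
        return is_hugetlb ? hugetlb_change_model(start, end)
                          : range_change_model(start, end);
    }

    int main(void)
    {
        printf("%ld pages changed\n", change_protection_model(false, 0, 0x10000));
        printf("%ld huge pages changed\n", change_protection_model(true, 0, 0x400000));
        return 0;
    }
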
410 unsigned long start, unsigned long end, unsigned long newflags)
414 long nrpages = (end - start) >> PAGE_SHIFT;
435 error = walk_page_range(current->mm, start, end,
465 *pprev = vma_merge(mm, *pprev, start, end, newflags,
482 if (end != vma->vm_end) {
483 error = split_vma(mm, vma, end, 0);
497 change_protection(vma, start, end, vma->vm_page_prot,
506 populate_vma_page_range(vma, start, end, NULL);
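
Lines 410-506 look like mprotect_fixup(): it accounts the range in pages (line 414), tries vma_merge() first (line 465), and only if merging fails splits the vma so that exactly the [start, end) slice takes the new flags (the end-side check is on line 482). A minimal sketch of that boundary decision with a toy vma struct; the names and PAGE_SHIFT are stand-ins, and the merge attempt is elided:

    #include <stdio.h>

    #define PAGE_SHIFT 12               /* an assumption for the demo */

    struct toy_vma { unsigned long vm_start, vm_end; };

    /* Model of the boundary handling in mprotect_fixup(): split off
     * whatever lies outside [start, end) so only that slice gets the
     * new flags. */
    static void fixup_model(struct toy_vma *vma,
                            unsigned long start, unsigned long end)
    {
        long nrpages = (end - start) >> PAGE_SHIFT;   /* line 414 */

        if (start != vma->vm_start)
            printf("split_vma at %#lx (range starts mid-vma)\n", start);
        if (end != vma->vm_end)                       /* line 482 */
            printf("split_vma at %#lx (range ends mid-vma)\n", end);
        printf("apply new flags to [%#lx, %#lx), %ld pages\n",
               start, end, nrpages);
    }

    int main(void)
    {
        struct toy_vma vma = { 0x1000, 0x9000 };

        fixup_model(&vma, 0x3000, 0x7000);  /* strictly inside: two splits */
        return 0;
    }
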
525 unsigned long nstart, end, tmp, reqprot;
556 end = start + len;
557 if (end <= start)
581 if (vma->vm_start >= end)
591 end = vma->vm_end;
640 if (tmp > end)
641 tmp = end;
649 if (nstart >= end)
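
Lines 525-649 trace the syscall-level loop: compute end = start + len with an overflow/empty check (lines 556-557), then walk vmas in address order, clamping each step to the smaller of vma->vm_end and end (lines 591, 640-641) and fixing up one chunk at a time until nstart reaches end (line 649). A runnable userspace model of that clamping loop; the toy vma array stands in for the mm's vma tree:

    #include <stdio.h>

    struct toy_vma { unsigned long vm_start, vm_end; };

    int main(void)
    {
        /* Toy vma list standing in for the mm's vma tree. */
        struct toy_vma vmas[] = {
            { 0x1000, 0x4000 },
            { 0x4000, 0x8000 },
            { 0x8000, 0xc000 },
        };
        unsigned long start = 0x2000, len = 0x8000;
        unsigned long end = start + len;    /* line 556 */
        unsigned long nstart = start, tmp;

        if (end <= start)                   /* overflow/empty check, line 557 */
            return 1;

        for (int i = 0; i < 3 && nstart < end; i++) {
            struct toy_vma *vma = &vmas[i];

            if (vma->vm_start >= end)       /* past the range, line 581 */
                break;

            tmp = vma->vm_end;              /* line 591 */
            if (tmp > end)                  /* clamp the last chunk, lines 640-641 */
                tmp = end;
            printf("mprotect_fixup on [%#lx, %#lx)\n", nstart, tmp);
            nstart = tmp;                   /* done once nstart >= end, line 649 */
        }
        return 0;
    }
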