Lines Matching defs:end
(A definition search for the identifier `end`; each hit below is prefixed with its source line number. The fragments appear to come from the Linux kernel's mm/mprotect.c, the page-table walk behind mprotect().)

86 		unsigned long end, pgprot_t newprot, unsigned long cp_flags)
285 } while (pte++, addr += PAGE_SIZE, addr != end);
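Lines 86 and 285 bracket the innermost walker (change_pte_range() in mainline): it advances one PTE and one PAGE_SIZE of address per iteration, with the comma operator doing the bookkeeping inside the do/while test. A minimal user-space sketch of that loop shape; PAGE_SIZE and pte_t here are stand-ins, not the kernel definitions:

    #include <stdio.h>

    #define PAGE_SIZE 4096UL            /* stand-in; arch-dependent in the kernel */
    typedef unsigned long pte_t;        /* placeholder type for the sketch */

    static void walk_pte_range(pte_t *pte, unsigned long addr, unsigned long end)
    {
        do {
            /* ...inspect or modify *pte for the page at addr... */
            printf("pte %p -> addr 0x%lx\n", (void *)pte, addr);
        } while (pte++, addr += PAGE_SIZE, addr != end);
    }

    int main(void)
    {
        pte_t table[4] = { 0 };
        walk_pte_range(table, 0x10000, 0x10000 + 4 * PAGE_SIZE);
        return 0;
    }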
356 unsigned long end, pgprot_t newprot, unsigned long cp_flags)
371 next = pmd_addr_end(addr, end);
386 vma->vm_mm, addr, end);
428 } while (pmd++, addr = next, addr != end);
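Lines 356-428 are the PMD-level walker; pmd_addr_end() on line 371 yields the next PMD-aligned boundary or the caller's end, whichever comes first, so no inner call ever crosses a PMD. A sketch of that clamping arithmetic, assuming the 2 MiB PMD span of x86-64 with 4 KiB pages (PMD_SIZE is arch-dependent):

    #include <stdio.h>

    #define PMD_SIZE (1UL << 21)        /* 2 MiB: typical x86-64 span */
    #define PMD_MASK (~(PMD_SIZE - 1))

    /* Shape of the generic pmd_addr_end(): the next PMD boundary, clamped
     * to end.  The "- 1" comparison keeps the test correct if the boundary
     * wraps to 0 at the top of the address space. */
    static unsigned long pmd_addr_end_sketch(unsigned long addr, unsigned long end)
    {
        unsigned long boundary = (addr + PMD_SIZE) & PMD_MASK;
        return boundary - 1 < end - 1 ? boundary : end;
    }

    int main(void)
    {
        unsigned long addr = 0x1ff000, end = 0x600000, next;
        do {
            next = pmd_addr_end_sketch(addr, end);
            printf("pmd chunk [0x%lx, 0x%lx)\n", addr, next);
        } while (addr = next, addr != end);
        return 0;
    }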
440 unsigned long end, pgprot_t newprot, unsigned long cp_flags)
448 next = pud_addr_end(addr, end);
456 } while (pud++, addr = next, addr != end);
463 unsigned long end, pgprot_t newprot, unsigned long cp_flags)
471 next = p4d_addr_end(addr, end);
479 } while (p4d++, addr = next, addr != end);
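Lines 440-456 and 463-479 are the PUD- and P4D-level walkers. Apart from the types they are the same shell as the PMD level: split the range at progressively larger power-of-two boundaries, descend one level per chunk. A sketch of that descent collapsed into one recursive function, assuming x86-64's 21/30/39-bit level shifts (other architectures differ):

    #include <stdio.h>

    /* Next boundary of a (1 << shift)-sized region, clamped to end. */
    static unsigned long level_addr_end(unsigned long addr, unsigned long end,
                                        unsigned int shift)
    {
        unsigned long size = 1UL << shift;
        unsigned long boundary = (addr + size) & ~(size - 1);
        return boundary - 1 < end - 1 ? boundary : end;
    }

    /* The kernel has one walker function per level; this sketch folds them
     * into a single recursion for brevity. */
    static void walk(unsigned long addr, unsigned long end,
                     const unsigned int *shifts, int level)
    {
        if (level < 0)
            return;                     /* individual PTEs would be touched here */
        do {
            unsigned long next = level_addr_end(addr, end, shifts[level]);
            printf("%*slevel %d: [0x%lx, 0x%lx)\n",
                   2 * (2 - level), "", level, addr, next);
            walk(addr, next, shifts, level - 1);
            addr = next;
        } while (addr != end);
    }

    int main(void)
    {
        /* PMD/PUD/P4D spans for x86-64 with 4 KiB pages: 2M / 1G / 512G. */
        static const unsigned int shifts[] = { 21, 30, 39 };
        walk(0x3fe00000UL, 0x40400000UL, shifts, 2);
        return 0;
    }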
486 unsigned long end, pgprot_t newprot, unsigned long cp_flags)
493 BUG_ON(addr >= end);
497 next = pgd_addr_end(addr, end);
507 } while (pgd++, addr = next, addr != end);
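Line 493's BUG_ON(addr >= end) is doing real work: every loop in this family is a do/while, whose body runs once before the condition is tested, so an empty or inverted range would modify at least one entry it was never asked to touch. A sketch of the precondition, with assert() standing in for BUG_ON():

    #include <assert.h>
    #include <stdio.h>

    #define PAGE_SIZE 4096UL            /* stand-in value */

    static void walk_range(unsigned long addr, unsigned long end)
    {
        assert(addr < end);             /* kernel: BUG_ON(addr >= end) */
        do {
            printf("visit 0x%lx\n", addr);
        } while (addr += PAGE_SIZE, addr != end);
    }

    int main(void)
    {
        walk_range(0x1000, 0x3000);     /* fine: two pages */
        /* walk_range(0x3000, 0x3000): an empty range would still run the
         * body once, which is exactly what the precondition rules out. */
        return 0;
    }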
516 unsigned long end, unsigned long cp_flags)
536 pages = hugetlb_change_protection(vma, start, end, newprot,
539 pages = change_protection_range(tlb, vma, start, end, newprot,
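Lines 536-539 are the dispatch in change_protection(): hugetlb VMAs keep their page tables at a different granularity, so they take hugetlb_change_protection() while everything else takes change_protection_range(); both report how many pages they changed. A toy version of that dispatch (the predicate and both walkers are stubs, not kernel APIs):

    #include <stdbool.h>
    #include <stdio.h>

    struct vma { unsigned long vm_start, vm_end; bool is_hugetlb; };

    /* Stand-ins for hugetlb_change_protection() and change_protection_range();
     * each returns how many pages it touched, at its own page size. */
    static long hugetlb_walk(const struct vma *v) { return (v->vm_end - v->vm_start) >> 21; }
    static long generic_walk(const struct vma *v) { return (v->vm_end - v->vm_start) >> 12; }

    static long change_protection_sketch(const struct vma *v)
    {
        return v->is_hugetlb ? hugetlb_walk(v) : generic_walk(v);
    }

    int main(void)
    {
        struct vma normal = { 0x0, 0x4000,   false };
        struct vma huge   = { 0x0, 0x400000, true  };
        printf("%ld pages, %ld huge pages\n",
               change_protection_sketch(&normal), change_protection_sketch(&huge));
        return 0;
    }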
578 unsigned long start, unsigned long end, unsigned long newflags)
582 long nrpages = (end - start) >> PAGE_SHIFT;
603 error = walk_page_range(current->mm, start, end,
633 *pprev = vma_merge(vmi, mm, *pprev, start, end, newflags,
650 if (end != vma->vm_end) {
651 error = split_vma(vmi, vma, end, 0);
667 change_protection(tlb, vma, start, end, mm_cp_flags);
675 populate_vma_page_range(vma, start, end, NULL);
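Lines 578-675 come from mprotect_fixup(). It accounts nrpages from the byte range (line 582), first tries vma_merge() to fuse the region into a neighbor that already carries the new flags (line 633), and otherwise splits the VMA so that exactly [start, end) changes (lines 650-651 handle the tail; a matching split covers the head); change_protection() then does the walk (line 667), and mlocked ranges may be repopulated afterwards (line 675). A toy version of the tail-split decision and the page accounting; the struct and split helper are stand-ins for the kernel's VMA machinery:

    #include <stdio.h>

    #define PAGE_SHIFT 12               /* stand-in; arch-dependent */

    struct vma { unsigned long vm_start, vm_end; };

    /* Toy split: shrink *v to [v->vm_start, at) and return the tail length.
     * The kernel's split_vma() instead creates a second full VMA. */
    static unsigned long split_tail(struct vma *v, unsigned long at)
    {
        unsigned long tail = v->vm_end - at;
        v->vm_end = at;
        return tail;
    }

    int main(void)
    {
        struct vma v = { 0x10000, 0x20000 };
        unsigned long start = 0x10000, end = 0x18000, tail = 0;
        long nrpages = (end - start) >> PAGE_SHIFT;   /* line 582's accounting */

        if (end != v.vm_end)                          /* line 650's test */
            tail = split_tail(&v, end);

        printf("changing %ld pages; vma now [0x%lx, 0x%lx), tail 0x%lx bytes\n",
               nrpages, v.vm_start, v.vm_end, tail);
        return 0;
    }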
694 unsigned long nstart, end, tmp, reqprot;
722 end = start + len;
723 if (end <= start)
742 vma = vma_find(&vmi, end);
748 if (vma->vm_start >= end)
758 end = vma->vm_end;
772 for_each_vma_range(vmi, vma, end) {
819 if (tmp > end)
820 tmp = end;
838 if (!error && tmp < end)
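Lines 694-838 are from do_mprotect_pkey(), the syscall body: end = start + len with an overflow/empty check (lines 722-723), a lookup of the first candidate VMA (line 742) with PROT_GROWSDOWN/PROT_GROWSUP range adjustments (lines 748, 758), then a walk over every VMA overlapping [start, end) in which each step is clamped to the request with tmp = min(vma->vm_end, end) (lines 819-820); finishing with tmp still short of end (line 838) means the range had a hole. A toy walk over a sorted VMA array showing the clamping and the hole checks (the array and error values stand in for the maple-tree iterator and -ENOMEM):

    #include <stdio.h>

    struct vma { unsigned long vm_start, vm_end; };

    int main(void)
    {
        /* Sorted, non-overlapping stand-ins for the process's VMAs. */
        struct vma vmas[] = { { 0x2000, 0x6000 }, { 0x6000, 0xa000 } };
        unsigned long start = 0x3000, len = 0x6000;
        unsigned long end = start + len;   /* line 722 */
        unsigned long tmp = start;
        int error = 0;

        if (end <= start)                  /* line 723: empty or wrapped request */
            return 1;

        for (unsigned int i = 0; i < 2 && vmas[i].vm_start < end; i++) {
            const struct vma *v = &vmas[i];
            if (v->vm_start > tmp) {       /* hole before this VMA */
                error = -1;
                break;
            }
            tmp = v->vm_end;
            if (tmp > end)                 /* lines 819-820: clamp to the request */
                tmp = end;
            printf("protect [0x%lx, 0x%lx)\n",
                   v->vm_start > start ? v->vm_start : start, tmp);
        }
        if (!error && tmp < end)           /* line 838: VMAs ran out early */
            error = -1;

        printf("error = %d\n", error);
        return 0;
    }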