Lines Matching defs:vmi (struct vma_iterator call sites; the leading numbers are the file's own source line numbers, apparently mm/mmap.c)

157 static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi,
160 return mas_prev(&vmi->mas, min);
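
Lines 157–160 are nearly the whole helper; a plausible reconstruction follows (only the min parameter name is inferred from the mas_prev() call — and, like every sketch below, it assumes it sits inside the listed file, where the mm-internal iterator helpers are in scope):

static inline struct vm_area_struct *vma_prev_limit(struct vma_iterator *vmi,
						    unsigned long min)
{
	/* Step the underlying maple-tree state backwards, never past min. */
	return mas_prev(&vmi->mas, min);
}

brk() uses it at line 263 to find the heap VMA without walking below mm->start_brk.
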
182 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *brkvma,
192 struct vma_iterator vmi;
235 vma_iter_init(&vmi, mm, newbrk);
236 brkvma = vma_find(&vmi, oldbrk);
245 if (do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true))
258 vma_iter_init(&vmi, mm, oldbrk);
259 next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap);
263 brkvma = vma_prev_limit(&vmi, mm->start_brk);
265 if (do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0) < 0)
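
Taken together, lines 235–265 are the two directions of sys_brk(): the shrink path unmaps through do_vma_munmap(), while the grow path first probes for a neighbour inside the stack guard gap and then extends via do_brk_flags(). A condensed sketch of that flow (limit checks, locking and userfaultfd bookkeeping elided; brk_move_sketch is a hypothetical name):

static int brk_move_sketch(struct mm_struct *mm, unsigned long oldbrk,
			   unsigned long newbrk)
{
	struct vma_iterator vmi;
	struct vm_area_struct *brkvma, *next;
	LIST_HEAD(uf);

	if (newbrk <= oldbrk) {
		/* Shrink: unmap [newbrk, oldbrk) out of the heap VMA. */
		vma_iter_init(&vmi, mm, newbrk);
		brkvma = vma_find(&vmi, oldbrk);
		if (!brkvma)
			return 0;	/* nothing mapped there */
		/* true: drop mmap_lock on success, as brk() wants. */
		return do_vma_munmap(&vmi, brkvma, newbrk, oldbrk, &uf, true);
	}

	/* Grow: refuse if the next mapping's guard gap is too close. */
	vma_iter_init(&vmi, mm, oldbrk);
	next = vma_find(&vmi, newbrk + PAGE_SIZE + stack_guard_gap);
	if (next && newbrk + PAGE_SIZE > vm_start_gap(next))
		return -ENOMEM;

	brkvma = vma_prev_limit(&vmi, mm->start_brk);
	return do_brk_flags(&vmi, brkvma, oldbrk, newbrk - oldbrk, 0);
}
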
292 VMA_ITERATOR(vmi, mm, 0);
295 for_each_vma(vmi, vma) {
303 vmi_start = vma_iter_addr(&vmi);
304 vmi_end = vma_iter_end(&vmi);
317 vma_iter_dump_tree(&vmi);
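
Lines 292–317 are the CONFIG_DEBUG_VM consistency walk: each VMA's own vm_start/vm_end is checked against the range the iterator reports for it. A minimal sketch of that cross-check (validate_sketch is a hypothetical name; vma_iter_dump_tree() is only built under the maple-tree debug config):

static void validate_sketch(struct mm_struct *mm)
{
	struct vm_area_struct *vma;
	VMA_ITERATOR(vmi, mm, 0);

	for_each_vma(vmi, vma) {
		unsigned long vmi_start = vma_iter_addr(&vmi);
		unsigned long vmi_end = vma_iter_end(&vmi);

		/* The tree and the VMA must agree on the occupied range. */
		if (vmi_start != vma->vm_start || vmi_end != vma->vm_end)
			vma_iter_dump_tree(&vmi);
	}
}
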
376 VMA_ITERATOR(vmi, mm, addr);
380 for_each_vma_range(vmi, vma, end) {
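
Lines 376–380 show the bounded walk: for_each_vma_range() visits only VMAs intersecting [addr, end). A sketch of the page-counting use this appears to be, clamping each VMA to the queried window:

static unsigned long count_pages_sketch(struct mm_struct *mm,
					unsigned long addr, unsigned long end)
{
	VMA_ITERATOR(vmi, mm, addr);
	struct vm_area_struct *vma;
	unsigned long nr_pages = 0;

	for_each_vma_range(vmi, vma, end) {
		/* Count only the part of the VMA inside [addr, end). */
		unsigned long vm_start = max(addr, vma->vm_start);
		unsigned long vm_end = min(end, vma->vm_end);

		nr_pages += PHYS_PFN(vm_end - vm_start);
	}
	return nr_pages;
}
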
403 VMA_ITERATOR(vmi, mm, 0);
406 vma_iter_config(&vmi, vma->vm_start, vma->vm_end);
407 if (vma_iter_prealloc(&vmi, vma))
412 vma_iter_store(&vmi, vma);
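
Lines 403–412 are the canonical write sequence used throughout the file: describe the range with vma_iter_config(), preallocate tree nodes with vma_iter_prealloc() while failure is still recoverable, and only then vma_iter_store(), which cannot fail. A sketch of linking one VMA this way (link_sketch is hypothetical; the real code also handles file rmap and VMA locking):

static int link_sketch(struct mm_struct *mm, struct vm_area_struct *vma)
{
	VMA_ITERATOR(vmi, mm, 0);

	vma_iter_config(&vmi, vma->vm_start, vma->vm_end);
	if (vma_iter_prealloc(&vmi, vma))
		return -ENOMEM;		/* nothing has been touched yet */

	vma_iter_store(&vmi, vma);	/* cannot fail after prealloc */
	mm->map_count++;
	return 0;
}
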
513 * @vmi: The vma iterator
517 struct vma_iterator *vmi, struct mm_struct *mm)
538 vma_iter_store(vmi, vp->insert);
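
Line 538 is the delayed half of that pattern: a VMA handed in via vp->insert is stored only inside vma_complete(), once the locks taken by vma_prepare() are in effect. Roughly (condensed; struct vma_prepare lives in mm/internal.h):

static void complete_insert_sketch(struct vma_prepare *vp,
				   struct vma_iterator *vmi,
				   struct mm_struct *mm)
{
	if (vp->insert) {
		/* The iterator range was preconfigured by the caller. */
		vma_iter_store(vmi, vp->insert);
		mm->map_count++;
	}
}
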
622 * @vmi: The vma iterator
636 int vma_expand(struct vma_iterator *vmi, struct vm_area_struct *vma,
663 vma_iter_config(vmi, start, end);
664 if (vma_iter_prealloc(vmi, vma))
672 vma_iter_store(vmi, vma);
674 vma_complete(&vp, vmi, vma->vm_mm);
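
Lines 636–674 give vma_expand() in outline: configure the grown range, preallocate, then update the VMA fields and store, finishing with vma_complete(). A condensed sketch (locking and the case where the next VMA is swallowed and removed are elided):

static int expand_sketch(struct vma_iterator *vmi, struct vm_area_struct *vma,
			 unsigned long start, unsigned long end, pgoff_t pgoff)
{
	struct vma_prepare vp;

	vma_iter_config(vmi, start, end);
	if (vma_iter_prealloc(vmi, vma))
		return -ENOMEM;

	init_vma_prep(&vp, vma);
	vma_prepare(&vp);
	vma_adjust_trans_huge(vma, start, end, 0);

	vma->vm_start = start;
	vma->vm_end = end;
	vma->vm_pgoff = pgoff;
	vma_iter_store(vmi, vma);	/* overwrite with the grown range */

	vma_complete(&vp, vmi, vma->vm_mm);
	return 0;
}
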
685 * @vmi: The vma iterator
692 int vma_shrink(struct vma_iterator *vmi, struct vm_area_struct *vma,
700 vma_iter_config(vmi, vma->vm_start, start);
702 vma_iter_config(vmi, end, vma->vm_end);
704 if (vma_iter_prealloc(vmi, NULL))
713 vma_iter_clear(vmi);
717 vma_complete(&vp, vmi, vma->vm_mm);
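
Lines 692–717 are the mirror image: vma_shrink() configures the iterator over the span being given up (whichever end of the VMA it is), preallocates, clears that span from the tree, and only then shrinks the VMA itself. A near-complete sketch:

static int shrink_sketch(struct vma_iterator *vmi, struct vm_area_struct *vma,
			 unsigned long start, unsigned long end, pgoff_t pgoff)
{
	struct vma_prepare vp;

	/* Exactly one end moves inward; configure over the doomed span. */
	if (vma->vm_start < start)
		vma_iter_config(vmi, vma->vm_start, start);
	else
		vma_iter_config(vmi, end, vma->vm_end);

	if (vma_iter_prealloc(vmi, NULL))
		return -ENOMEM;

	init_vma_prep(&vp, vma);
	vma_prepare(&vp);
	vma_adjust_trans_huge(vma, start, end, 0);

	vma_iter_clear(vmi);		/* drop the span from the tree */
	vma->vm_start = start;
	vma->vm_end = end;
	vma->vm_pgoff = pgoff;
	vma_complete(&vp, vmi, vma->vm_mm);
	return 0;
}
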
869 struct vm_area_struct *vma_merge(struct vma_iterator *vmi, struct mm_struct *mm,
916 vma_prev(vmi);
1001 vma_iter_config(vmi, vma_start, vma_end);
1003 vma_iter_config(vmi, adjust->vm_start + adj_start,
1007 if (vma_iter_prealloc(vmi, vma))
1022 vma_iter_store(vmi, vma);
1029 vma_iter_store(vmi, next);
1033 vma_complete(&vp, vmi, mm);
1042 vma_iter_set(vmi, addr);
1043 vma_iter_load(vmi);
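
In vma_merge() (lines 869–1043) the same machinery runs under a case analysis: vma_prev() at 916 steps back to the candidate predecessor, the iterator is configured for whichever range will actually be rewritten (1001 or 1003), a single preallocation covers every store, and lines 1042–1043 re-point the iterator at addr before the merged VMA is returned. A sketch of just the write phase; merge_write_sketch is hypothetical, 'expanded' and 'adj_start' stand in for the function's locals, and 'adjust' is the neighbour the listing stores as 'next' at line 1029:

static struct vm_area_struct *merge_write_sketch(struct vma_iterator *vmi,
		struct vm_area_struct *vma, struct vm_area_struct *adjust,
		unsigned long vma_start, unsigned long vma_end,
		long adj_start, bool expanded)
{
	if (expanded)		/* vma swallows the whole merged span */
		vma_iter_config(vmi, vma_start, vma_end);
	else			/* only a neighbour's boundary moves */
		vma_iter_config(vmi, adjust->vm_start + adj_start,
				adjust->vm_end);

	if (vma_iter_prealloc(vmi, vma))
		return NULL;

	/* Real code brackets this with vma_prepare()/vma_complete(). */
	vma->vm_start = vma_start;
	vma->vm_end = vma_end;
	if (expanded)
		vma_iter_store(vmi, vma);

	if (adj_start) {
		adjust->vm_start += adj_start;
		adjust->vm_pgoff += adj_start >> PAGE_SHIFT;
		if (adj_start < 0)
			vma_iter_store(vmi, adjust);
	}
	return vma;
}
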
2366 int __split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
2394 vma_iter_config(vmi, new->vm_start, new->vm_end);
2395 if (vma_iter_prealloc(vmi, new))
2428 vma_complete(&vp, vmi, vma->vm_mm);
2432 vma_next(vmi);
2438 vma_iter_free(vmi);
2448 int split_vma(struct vma_iterator *vmi, struct vm_area_struct *vma,
2454 return __split_vma(vmi, vma, addr, new_below);
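
__split_vma() (lines 2366–2438) duplicates the VMA, trims the copy to one side of addr, and inserts it with the usual config/prealloc/complete dance; on success line 2432 advances the iterator so it rests on the later half, and the error path frees the preallocation (line 2438). A condensed sketch (mempolicy, anon_vma chains and driver open() hooks elided):

static int split_sketch(struct vma_iterator *vmi, struct vm_area_struct *vma,
			unsigned long addr, int new_below)
{
	struct vm_area_struct *new = vm_area_dup(vma);

	if (!new)
		return -ENOMEM;

	if (new_below) {
		new->vm_end = addr;
	} else {
		new->vm_start = addr;
		new->vm_pgoff += (addr - vma->vm_start) >> PAGE_SHIFT;
	}

	vma_iter_config(vmi, new->vm_start, new->vm_end);
	if (vma_iter_prealloc(vmi, new)) {
		vm_area_free(new);
		return -ENOMEM;
	}

	/* Real code: init_vma_prep(&vp, vma); vp.insert = new; trim the
	 * old VMA to the other side of addr; vma_complete() then stores
	 * the new VMA, as in the vp->insert path sketched earlier. */
	if (new_below)
		vma_next(vmi);	/* leave the iterator on the later part */
	return 0;
}
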
2459 * @vmi: The vma iterator
2472 do_vmi_align_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma,
2482 mt_init_flags(&mt_detach, vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK);
2504 error = __split_vma(vmi, vma, start, 1);
2517 error = __split_vma(vmi, next, end, 0);
2550 } for_each_vma_range(*vmi, next, end);
2559 vma_iter_set(vmi, start);
2562 for_each_vma_range(*vmi, vma_mas, end) {
2572 while (vma_iter_addr(vmi) > start)
2573 vma_iter_prev_range(vmi);
2575 error = vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL);
2585 prev = vma_iter_prev_range(vmi);
2586 next = vma_next(vmi);
2588 vma_iter_prev_range(vmi);
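
do_vmi_align_munmap() (lines 2472–2588) never removes VMAs one at a time. Boundary VMAs are first split in place (lines 2504, 2517); each doomed VMA is then parked in mt_detach, a stack-local maple tree created with the same lock flags as the real one (line 2482); and the whole span is wiped with one vma_iter_clear_gfp() (line 2575) before the iterator is walked back to the gap's edges (lines 2572–2588). A sketch of the detach-then-clear shape under those assumptions (detach_range_sketch is hypothetical):

static int detach_range_sketch(struct vma_iterator *vmi,
			       unsigned long start, unsigned long end)
{
	struct maple_tree mt_detach;
	MA_STATE(mas_detach, &mt_detach, 0, 0);
	struct vm_area_struct *next;
	int count = 0;

	mt_init_flags(&mt_detach, vmi->mas.tree->ma_flags & MT_FLAGS_LOCK_MASK);

	for_each_vma_range(*vmi, next, end) {
		/* Park each doomed VMA in the side tree, indexed 0..n. */
		mas_set(&mas_detach, count++);
		if (mas_store_gfp(&mas_detach, next, GFP_KERNEL))
			goto abort;
	}

	/* One ranged clear erases the span from the real tree; the
	 * parked VMAs are torn down afterwards, outside the hot path. */
	if (vma_iter_clear_gfp(vmi, start, end, GFP_KERNEL))
		goto abort;

	__mt_destroy(&mt_detach);
	return 0;
abort:
	__mt_destroy(&mt_detach);
	return -ENOMEM;
}
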
2624 * @vmi: The vma iterator
2638 int do_vmi_munmap(struct vma_iterator *vmi, struct mm_struct *mm,
2661 vma = vma_find(vmi, end);
2668 return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock);
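
do_vmi_munmap() (lines 2638–2668) is the validating front end: it page-aligns the request, looks for the first overlapping VMA with vma_find(), and hands off to the aligned worker, treating an empty range as success. A condensed sketch:

static int munmap_sketch(struct vma_iterator *vmi, struct mm_struct *mm,
			 unsigned long start, size_t len,
			 struct list_head *uf, bool unlock)
{
	unsigned long end;
	struct vm_area_struct *vma;

	if (offset_in_page(start) || start > TASK_SIZE ||
	    len > TASK_SIZE - start)
		return -EINVAL;

	end = start + PAGE_ALIGN(len);
	if (end == start)
		return -EINVAL;

	vma = vma_find(vmi, end);	/* first VMA intersecting the range */
	if (!vma) {
		if (unlock)
			mmap_write_unlock(mm);
		return 0;		/* nothing to unmap */
	}

	return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock);
}
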
2682 VMA_ITERATOR(vmi, mm, start);
2684 return do_vmi_munmap(&vmi, mm, start, len, uf, false);
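
Lines 2682–2684 show how thin the classic entry point has become; reconstructed from the two lines above plus the known signature, modulo declarations it is just:

int do_munmap(struct mm_struct *mm, unsigned long start, size_t len,
	      struct list_head *uf)
{
	VMA_ITERATOR(vmi, mm, start);

	return do_vmi_munmap(&vmi, mm, start, len, uf, false);
}
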
2700 VMA_ITERATOR(vmi, mm, addr);
2718 if (do_vmi_munmap(&vmi, mm, addr, len, uf, false))
2731 next = vma_next(&vmi);
2732 prev = vma_prev(&vmi);
2735 vma_iter_next_range(&vmi);
2759 vma_iter_next_range(&vmi);
2764 !vma_expand(&vmi, vma, merge_start, merge_end, vm_pgoff, next)) {
2770 vma_iter_set(&vmi, addr);
2784 vma_iter_config(&vmi, addr, end);
2811 vma_iter_config(&vmi, addr, end);
2817 merge = vma_merge(&vmi, mm, prev, vma->vm_start,
2858 if (vma_iter_prealloc(&vmi, vma))
2863 vma_iter_store(&vmi, vma);
2927 vma_iter_set(&vmi, vma->vm_end);
2929 unmap_region(mm, &vmi.mas, vma, prev, next, vma->vm_start,
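
mmap_region() (lines 2700–2929) reuses one iterator for the whole operation: clear the range (2718), probe both neighbours (2731–2732), try to fold the new range into one of them with vma_expand() (2764), otherwise configure and insert a fresh VMA (2784–2863), retrying vma_merge() at 2817 in case the driver's mmap hook changed vm_flags; the error path re-anchors at vma->vm_end before unmap_region() (2927–2929). A drastically condensed sketch of the fallback insert path (accounting, file hooks and the merge attempts elided; map_region_sketch is hypothetical):

static unsigned long map_region_sketch(struct mm_struct *mm,
		struct vm_area_struct *vma, unsigned long addr,
		unsigned long len, struct list_head *uf)
{
	unsigned long end = addr + len;
	struct vm_area_struct *next, *prev;
	VMA_ITERATOR(vmi, mm, addr);

	/* Anything already mapped in [addr, end) goes away first. */
	if (do_vmi_munmap(&vmi, mm, addr, len, uf, false))
		return -ENOMEM;

	next = vma_next(&vmi);		/* candidate for merging after */
	prev = vma_prev(&vmi);		/* candidate for merging before */
	if ((prev && prev->vm_end == addr) || (next && next->vm_start == end)) {
		/* Real code attempts vma_expand() over the neighbour(s)
		 * and only falls through here when that fails. */
	}

	vma_iter_set(&vmi, addr);	/* re-anchor on the gap (cf. 2770) */
	vma_iter_config(&vmi, addr, end);
	if (vma_iter_prealloc(&vmi, vma))
		return -ENOMEM;

	vma_iter_store(&vmi, vma);
	mm->map_count++;
	return addr;
}
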
2948 VMA_ITERATOR(vmi, mm, start);
2953 ret = do_vmi_munmap(&vmi, mm, start, len, &uf, unlock);
3011 VMA_ITERATOR(vmi, mm, vma->vm_end);
3014 for_each_vma_range(vmi, next, start + size) {
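
Lines 3011–3014 belong to remap_file_pages(): starting the iterator at vma->vm_end, every further VMA up to start + size must abut its predecessor and map the same file. A sketch of that contiguity check (range_is_contiguous_sketch is hypothetical):

static bool range_is_contiguous_sketch(struct vm_area_struct *vma,
				       unsigned long start, unsigned long size)
{
	struct mm_struct *mm = vma->vm_mm;
	struct vm_area_struct *next, *prev = vma;
	VMA_ITERATOR(vmi, mm, vma->vm_end);

	for_each_vma_range(vmi, next, start + size) {
		/* No holes, and the same backing file throughout. */
		if (next->vm_start != prev->vm_end ||
		    next->vm_file != vma->vm_file)
			return false;
		prev = next;
	}
	return true;
}
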
3059 * @vmi: The vma iterator pointing at the vma
3072 int do_vma_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma,
3079 return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock);
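
Lines 3072–3079 show the VMA-relative variant used by brk(): the caller already holds the right VMA and iterator position, so (give or take an arch unmap hook this listing does not show) the body is a single delegation:

int do_vma_munmap(struct vma_iterator *vmi, struct vm_area_struct *vma,
		  unsigned long start, unsigned long end,
		  struct list_head *uf, bool unlock)
{
	struct mm_struct *mm = vma->vm_mm;

	return do_vmi_align_munmap(vmi, vma, mm, start, end, uf, unlock);
}
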
3084 * @vmi: The vma iterator
3094 static int do_brk_flags(struct vma_iterator *vmi, struct vm_area_struct *vma,
3121 vma_iter_config(vmi, vma->vm_start, addr + len);
3122 if (vma_iter_prealloc(vmi, vma))
3132 vma_iter_store(vmi, vma);
3134 vma_complete(&vp, vmi, mm);
3140 vma_iter_next_range(vmi);
3153 if (vma_iter_store_gfp(vmi, vma, GFP_KERNEL))
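
do_brk_flags() (lines 3094–3153) prefers extending the existing heap VMA in place (3121–3140); only when that is impossible does it allocate a fresh anonymous VMA and insert it with vma_iter_store_gfp() (3153). A condensed sketch of the two paths (mergeability checks, accounting and khugepaged elided; brk_extend_sketch is hypothetical):

static int brk_extend_sketch(struct vma_iterator *vmi,
			     struct vm_area_struct *vma,
			     unsigned long addr, unsigned long len)
{
	struct mm_struct *mm = current->mm;
	struct vma_prepare vp;

	if (vma && vma->vm_end == addr) {
		/* Fast path: grow the existing heap VMA in place. */
		vma_iter_config(vmi, vma->vm_start, addr + len);
		if (vma_iter_prealloc(vmi, vma))
			return -ENOMEM;

		init_vma_prep(&vp, vma);
		vma_prepare(&vp);
		vma->vm_end = addr + len;
		vma_iter_store(vmi, vma);
		vma_complete(&vp, vmi, mm);
		return 0;
	}

	if (vma)
		vma_iter_next_range(vmi);	/* step off 'vma' first */

	/* Slow path: a fresh anonymous VMA for [addr, addr + len). */
	vma = vm_area_alloc(mm);
	if (!vma)
		return -ENOMEM;
	vma->vm_start = addr;
	vma->vm_end = addr + len;
	vma->vm_pgoff = addr >> PAGE_SHIFT;
	if (vma_iter_store_gfp(vmi, vma, GFP_KERNEL)) {
		vm_area_free(vma);
		return -ENOMEM;
	}
	mm->map_count++;
	return 0;
}
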
3183 VMA_ITERATOR(vmi, mm, addr);
3202 ret = do_vmi_munmap(&vmi, mm, addr, len, &uf, 0);
3206 vma = vma_prev(&vmi);
3207 ret = do_brk_flags(&vmi, vma, addr, len, flags);
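
vm_brk_flags() (lines 3183–3207) strings the pieces together: one iterator serves the munmap of any old mapping at addr, the vma_prev() lookup of the VMA to extend, and the do_brk_flags() call. A condensed sketch (locking and limit checks elided; vm_brk_sketch is hypothetical):

static int vm_brk_sketch(struct mm_struct *mm, unsigned long addr,
			 unsigned long len, unsigned long flags)
{
	struct vm_area_struct *vma;
	VMA_ITERATOR(vmi, mm, addr);
	LIST_HEAD(uf);
	int ret;

	/* Clear whatever currently occupies [addr, addr + len). */
	ret = do_vmi_munmap(&vmi, mm, addr, len, &uf, false);
	if (ret)
		return ret;

	/* The iterator now sits on the gap; step back to the VMA that
	 * may be extended, then grow it (or insert a new one). */
	vma = vma_prev(&vmi);
	return do_brk_flags(&vmi, vma, addr, len, flags);
}
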
3346 VMA_ITERATOR(vmi, mm, addr);
3361 new_vma = vma_merge(&vmi, mm, prev, addr, addr + len, vma->vm_flags,
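
copy_vma() (lines 3346–3361) shows the merge call in full: the iterator is anchored at the move destination and vma_merge() is handed every attribute of the VMA being copied, so an existing neighbour can absorb the range before a duplicate VMA is ever allocated. The call shape, with the trailing attribute arguments reconstructed from the vma_merge() signature at line 869 (prev and pgoff lookups elided):

	VMA_ITERATOR(vmi, mm, addr);
	struct vm_area_struct *new_vma;

	/* prev is the VMA just before addr; pgoff is recomputed for
	 * the destination (both computed by the elided code above). */
	new_vma = vma_merge(&vmi, mm, prev, addr, addr + len, vma->vm_flags,
			    vma->anon_vma, vma->vm_file, pgoff,
			    vma_policy(vma), vma->vm_userfaultfd_ctx,
			    anon_vma_name(vma));
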