Lines matching refs: dst_mm

All hits below line up with the Linux kernel's userfaultfd fill/write-protect implementation (mm/userfaultfd.c, circa v6.6); the leading number on each match is its line number within that file.
23 struct vm_area_struct *find_dst_vma(struct mm_struct *dst_mm,
33 dst_vma = find_vma(dst_mm, dst_start);
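The two matches above are the destination-VMA lookup. A minimal sketch of what this helper does, assuming the mainline shape around v6.6 (range_in_vma() and vm_userfaultfd_ctx.ctx are real kernel symbols; the body is a condensation, and the caller must already hold mmap_read_lock(dst_mm)):

static struct vm_area_struct *find_dst_vma(struct mm_struct *dst_mm,
					   unsigned long dst_start,
					   unsigned long len)
{
	struct vm_area_struct *dst_vma;

	/* find_vma() returns the first VMA ending above dst_start... */
	dst_vma = find_vma(dst_mm, dst_start);
	/* ...so reject it unless the whole destination range fits inside
	 * (range_in_vma() also rejects a NULL vma). */
	if (!range_in_vma(dst_vma, dst_start, dst_start + len))
		return NULL;

	/*
	 * Only VMAs registered with a userfaultfd context are valid
	 * targets; this also preserves the VM_MAYWRITE check done at
	 * registration time.
	 */
	if (!dst_vma->vm_userfaultfd_ctx.ctx)
		return NULL;

	return dst_vma;
}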
76 struct mm_struct *dst_mm = dst_vma->vm_mm;
94 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);
127 inc_mm_counter(dst_mm, mm_counter(page));
129 set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);
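Lines 76-129 are the PTE-install step (mfill_atomic_install_pte() in mainline). A sketch of its locking skeleton under the same assumptions; the real function also wires up rmap and the LRU, and newer kernels read the PTE via ptep_get():

static int install_pte_sketch(struct mm_struct *dst_mm, pmd_t *dst_pmd,
			      struct vm_area_struct *dst_vma,
			      unsigned long dst_addr,
			      struct page *page, pte_t _dst_pte)
{
	spinlock_t *ptl;
	pte_t *dst_pte;
	int ret = -EEXIST;

	/* Map the destination PTE and take its page-table lock (line 94). */
	dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);

	/* Someone may have populated the slot since the caller checked. */
	if (!pte_none(*dst_pte))
		goto out_unlock;

	/* Account the page against the right RSS counter (line 127)... */
	inc_mm_counter(dst_mm, mm_counter(page));
	/* ...and only then publish the entry to the hardware walker (129). */
	set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);
	update_mmu_cache(dst_vma, dst_addr, dst_pte);
	ret = 0;

out_unlock:
	pte_unmap_unlock(dst_pte, ptl);
	return ret;
}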
298 struct mm_struct *dst_mm = dst_vma->vm_mm;
304 dst_pte = pte_offset_map_lock(dst_mm, dst_pmd, dst_addr, &ptl);
318 set_pte_at(dst_mm, dst_addr, dst_pte, _dst_pte);
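Lines 298-318 are the zeropage variant of the same take-lock/check/set sequence. Note there is no inc_mm_counter() hit in this range: the entry points at the shared zero page, so nothing is charged to RSS. The PTE it installs is built roughly as below (a sketch using the real my_zero_pfn()/pte_mkspecial() helpers; pte_mkspecial() keeps the entry out of normal reference counting):

	_dst_pte = pte_mkspecial(pfn_pte(my_zero_pfn(dst_addr),
					 dst_vma->vm_page_prot));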
363 struct mm_struct *dst_mm = dst_vma->vm_mm;
382 mmap_read_unlock(dst_mm);
406 dst_vma = find_dst_vma(dst_mm, dst_start, len);
442 dst_pte = huge_pte_alloc(dst_mm, dst_vma, dst_addr, vma_hpagesize);
466 mmap_read_unlock(dst_mm);
475 mmap_read_lock(dst_mm);
504 mmap_read_unlock(dst_mm);
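Lines 363-504 are the hugetlb path, and the unlock/lock pairs at 382, 466, 475 and 504 are its defining pattern: copying a huge page from userspace can fault and sleep, so it runs with mmap_read_lock dropped, after which the destination VMA must be looked up and validated again (line 406 reuses find_dst_vma() for exactly that). A condensed sketch of the retry loop; helpers suffixed _sketch are condensations for illustration, not the kernel's exact names:

	while (src_addr < src_start + len) {
		dst_pte = huge_pte_alloc(dst_mm, dst_vma, dst_addr,
					 vma_hpagesize);	/* line 442 */
		err = hugetlb_install_sketch(dst_mm, dst_pte, dst_vma,
					     dst_addr, src_addr, &folio);
		if (err == -ENOENT) {
			/* The source page has to be copied in first, and
			 * the copy may fault and sleep: drop the lock
			 * (line 466). */
			mmap_read_unlock(dst_mm);
			err = copy_folio_sketch(folio, src_addr);
			mmap_read_lock(dst_mm);			/* line 475 */
			if (err)
				break;
			/* The address space may have changed while it was
			 * unlocked: find and validate the VMA again. */
			dst_vma = find_dst_vma(dst_mm, dst_start, len);
			if (!dst_vma) {
				err = -ENOENT;
				break;
			}
			continue;	/* retry the same dst_addr */
		}
		if (err)
			break;
		dst_addr += vma_hpagesize;
		src_addr += vma_hpagesize;
	}
	mmap_read_unlock(dst_mm);				/* line 504 */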
567 static __always_inline ssize_t mfill_atomic(struct mm_struct *dst_mm,
596 mmap_read_lock(dst_mm);
612 dst_vma = find_dst_vma(dst_mm, dst_start, len);
660 dst_pmd = mm_alloc_pmd(dst_mm, dst_addr);
676 unlikely(__pte_alloc(dst_mm, dst_pmd))) {
696 mmap_read_unlock(dst_mm);
726 mmap_read_unlock(dst_mm);
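Lines 567-726 are mfill_atomic() itself, the engine behind all four wrappers below. Per destination page it allocates the page-table levels down to the PTE, refuses to touch an existing huge mapping, and delegates the install; as in the hugetlb path, a not-yet-copied source page forces an unlock/copy/relock round trip (696 and 726 are the unlock sites). A condensed sketch of one loop iteration, with mfill_atomic_pte_sketch() standing in for the per-mode dispatch of the real function:

	pmd_t *dst_pmd = mm_alloc_pmd(dst_mm, dst_addr);	/* line 660 */
	if (unlikely(!dst_pmd))
		return -ENOMEM;

	/*
	 * Never overwrite or split an existing huge mapping here:
	 * report -EEXIST and leave the decision to userspace.
	 */
	if (pmd_trans_huge(*dst_pmd))
		return -EEXIST;

	/* Make sure a page table exists below this pmd (line 676). */
	if (unlikely(pmd_none(*dst_pmd)) &&
	    unlikely(__pte_alloc(dst_mm, dst_pmd)))
		return -ENOMEM;

	err = mfill_atomic_pte_sketch(dst_pmd, dst_vma, dst_addr,
				      src_addr, flags, &folio);
	if (err == -ENOENT) {
		/*
		 * The source page still has to be copied in, which can
		 * fault and sleep: drop mmap_read_lock (line 696), copy,
		 * relock, revalidate the VMA and retry this address.
		 */
	}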
736 ssize_t mfill_atomic_copy(struct mm_struct *dst_mm, unsigned long dst_start,
740 return mfill_atomic(dst_mm, dst_start, src_start, len, mmap_changing,
744 ssize_t mfill_atomic_zeropage(struct mm_struct *dst_mm, unsigned long start,
747 return mfill_atomic(dst_mm, start, 0, len, mmap_changing,
751 ssize_t mfill_atomic_continue(struct mm_struct *dst_mm, unsigned long start,
755 return mfill_atomic(dst_mm, start, 0, len, mmap_changing,
759 ssize_t mfill_atomic_poison(struct mm_struct *dst_mm, unsigned long start,
763 return mfill_atomic(dst_mm, start, 0, len, mmap_changing,
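These four thin wrappers are the kernel side of the UFFDIO_COPY, UFFDIO_ZEROPAGE, UFFDIO_CONTINUE and UFFDIO_POISON ioctls respectively. A minimal, runnable userspace sketch of the first mapping (UFFDIO_COPY ends up in mfill_atomic_copy()); error handling is abbreviated, and the userfaultfd() syscall may need vm.unprivileged_userfaultfd=1 or CAP_SYS_PTRACE on hardened systems:

#define _GNU_SOURCE
#include <fcntl.h>
#include <linux/userfaultfd.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

int main(void)
{
	long page = sysconf(_SC_PAGESIZE);

	/* Open a uffd and complete the API handshake. */
	int uffd = syscall(SYS_userfaultfd, O_CLOEXEC | O_NONBLOCK);
	struct uffdio_api api = { .api = UFFD_API };
	ioctl(uffd, UFFDIO_API, &api);

	/* Register a fresh anonymous page for missing-page handling. */
	char *dst = mmap(NULL, page, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	struct uffdio_register reg = {
		.range = { .start = (unsigned long)dst, .len = page },
		.mode  = UFFDIO_REGISTER_MODE_MISSING,
	};
	ioctl(uffd, UFFDIO_REGISTER, &reg);

	/* UFFDIO_COPY installs an already-populated page atomically;
	 * this is the call that reaches mfill_atomic_copy() above. */
	char *src = mmap(NULL, page, PROT_READ | PROT_WRITE,
			 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
	memset(src, 0x42, page);
	struct uffdio_copy copy = {
		.dst = (unsigned long)dst,
		.src = (unsigned long)src,
		.len = page,
	};
	if (ioctl(uffd, UFFDIO_COPY, &copy) == -1)
		perror("UFFDIO_COPY");

	printf("dst[0] = 0x%x\n", dst[0]);	/* 0x42, no fault taken */
	return 0;
}

The copy here is issued proactively rather than from a fault-handler thread; either way the kernel path is the same, with dst_mm being the registering process's mm.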
796 int mwriteprotect_range(struct mm_struct *dst_mm, unsigned long start,
805 VMA_ITERATOR(vmi, dst_mm, start);
816 mmap_read_lock(dst_mm);
853 mmap_read_unlock(dst_mm);
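mwriteprotect_range() is the backend of UFFDIO_WRITEPROTECT; it walks the target VMAs with a VMA_ITERATOR (line 805) and, like the fill paths, does all its work under mmap_read_lock (816/853). Continuing the userspace sketch above, and assuming the range was also registered with UFFDIO_REGISTER_MODE_WP:

	struct uffdio_writeprotect wp = {
		.range = { .start = (unsigned long)dst, .len = page },
		.mode  = UFFDIO_WRITEPROTECT_MODE_WP,	/* omit to un-protect */
	};
	if (ioctl(uffd, UFFDIO_WRITEPROTECT, &wp) == -1)
		perror("UFFDIO_WRITEPROTECT");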