Lines matching references to src_mm (the source mm_struct walked during the fork-time page-table copy):
768 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,
787 &src_mm->mmlist);
793 set_pte_at(src_mm, addr, src_pte, pte);
815 set_pte_at(src_mm, addr, src_pte, pte);
848 set_pte_at(src_mm, addr, src_pte, pte);
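
The three set_pte_at(src_mm, ...) calls above all write back into the *source* page table while a non-present (swap or migration) entry is copied at fork: a plain swap entry hooks the child mm onto the mmlist chained off the parent's entry (line 787), and a writable migration entry is downgraded to read-only in the parent before being copied. A minimal sketch of that pattern, assuming kernel-internal context (linux/mm.h, linux/swapops.h); sketch_copy_nonpresent_pte() is a hypothetical name and device-private entries, PTE markers, soft-dirty/uffd-wp preservation and error handling are all omitted:

	/*
	 * Hedged sketch, not the real copy_nonpresent_pte(): it only shows why
	 * the source side is written with set_pte_at(src_mm, ...) (lines
	 * 793/815/848).
	 */
	static void sketch_copy_nonpresent_pte(struct mm_struct *dst_mm,
					       struct mm_struct *src_mm,
					       pte_t *dst_pte, pte_t *src_pte,
					       unsigned long addr, pte_t pte)
	{
		swp_entry_t entry = pte_to_swp_entry(pte);

		if (!non_swap_entry(entry)) {
			/* Plain swap entry: put the child mm on the swapoff
			 * scan list, chained next to the parent (line 787). */
			if (list_empty(&dst_mm->mmlist)) {
				spin_lock(&mmlist_lock);
				if (list_empty(&dst_mm->mmlist))
					list_add(&dst_mm->mmlist, &src_mm->mmlist);
				spin_unlock(&mmlist_lock);
			}
		} else if (is_writable_migration_entry(entry)) {
			/* COW mapping (check omitted): make the parent's entry
			 * read-only and store it back into the source table. */
			entry = make_readable_migration_entry(swp_offset(entry));
			pte = swp_entry_to_pte(entry);
			set_pte_at(src_mm, addr, src_pte, pte);
		}
		/* Finally install the (possibly downgraded) entry in the child. */
		set_pte_at(dst_mm, addr, dst_pte, pte);
	}
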
929 struct mm_struct *src_mm = src_vma->vm_mm;
964 ptep_set_wrprotect(src_mm, addr, src_pte);
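
Line 964 is the classic copy-on-write step: for a writable PTE in a private mapping, the *parent's* entry is write-protected first, so parent and child can share the page read-only until one of them faults on a write. A minimal sketch of just that step, under the same kernel-context assumption; sketch_share_cow_pte() is a hypothetical name, and the surrounding refcounting and pinned-page handling are left out:

	static pte_t sketch_share_cow_pte(struct mm_struct *src_mm,
					  struct vm_area_struct *src_vma,
					  unsigned long addr, pte_t *src_pte,
					  pte_t pte)
	{
		/* Private, writable mapping: write-protect the parent
		 * (line 964) and hand the child a read-only copy. */
		if (is_cow_mapping(src_vma->vm_flags) && pte_write(pte)) {
			ptep_set_wrprotect(src_mm, addr, src_pte);
			pte = pte_wrprotect(pte);
		}
		return pte;
	}
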
984 static inline struct folio *page_copy_prealloc(struct mm_struct *src_mm,
993 if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) {
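
page_copy_prealloc() (line 984) exists so that the PTE copy loop never has to allocate memory while holding page-table locks: a destination folio is allocated up front and, as line 993 shows, charged against src_mm's memory cgroup. A rough sketch under the same assumptions; the real allocation is VMA/NUMA-aware, which is elided here in favour of a plain folio_alloc(), and sketch_page_copy_prealloc() is a hypothetical name:

	static struct folio *sketch_page_copy_prealloc(struct mm_struct *src_mm,
						       struct vm_area_struct *vma,
						       unsigned long addr)
	{
		struct folio *new_folio;

		/* The real code allocates relative to vma/addr; a plain
		 * order-0 allocation stands in for that here. */
		new_folio = folio_alloc(GFP_HIGHUSER_MOVABLE, 0);
		if (!new_folio)
			return NULL;

		/* Charge the preallocated folio to the parent's memcg (line 993). */
		if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) {
			folio_put(new_folio);
			return NULL;
		}
		return new_folio;
	}
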
1008 struct mm_struct *src_mm = src_vma->vm_mm;
1035 src_pte = pte_offset_map_nolock(src_mm, src_pmd, addr, &src_ptl);
1063 ret = copy_nonpresent_pte(dst_mm, src_mm,
1121 prealloc = page_copy_prealloc(src_mm, src_vma, addr);
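
In copy_pte_range() (lines 1008-1121), src_mm is used three ways: to map the source PTE table without immediately taking its lock (pte_offset_map_nolock() at line 1035, with the lock then taken nested under the destination's), to pass both mms down to copy_nonpresent_pte() (line 1063), and to preallocate a memcg-charged folio via page_copy_prealloc() when a present page cannot simply be shared (line 1121). A heavily reduced skeleton of that loop, kernel context assumed, with the per-entry work left as comments and batching, rss accounting, lazy-MMU and retry handling all omitted; sketch_copy_pte_range() is a hypothetical name:

	static int sketch_copy_pte_range(struct vm_area_struct *dst_vma,
					 struct vm_area_struct *src_vma,
					 pmd_t *dst_pmd, pmd_t *src_pmd,
					 unsigned long addr, unsigned long end)
	{
		struct mm_struct *dst_mm = dst_vma->vm_mm;
		struct mm_struct *src_mm = src_vma->vm_mm;
		pte_t *orig_src_pte, *orig_dst_pte;
		pte_t *src_pte, *dst_pte;
		spinlock_t *src_ptl, *dst_ptl;

		/* Allocate and lock the destination PTE table ... */
		dst_pte = pte_alloc_map_lock(dst_mm, dst_pmd, addr, &dst_ptl);
		if (!dst_pte)
			return -ENOMEM;
		/* ... map the source table without its lock (line 1035) ... */
		src_pte = pte_offset_map_nolock(src_mm, src_pmd, addr, &src_ptl);
		if (!src_pte) {
			pte_unmap_unlock(dst_pte, dst_ptl);
			return -EAGAIN;	/* source table disappeared; caller retries */
		}
		/* ... then take the source lock nested under the destination's. */
		spin_lock_nested(src_ptl, SINGLE_DEPTH_NESTING);
		orig_src_pte = src_pte;
		orig_dst_pte = dst_pte;

		for (; addr != end; addr += PAGE_SIZE, dst_pte++, src_pte++) {
			pte_t ptent = ptep_get(src_pte);

			if (pte_none(ptent))
				continue;
			if (!pte_present(ptent)) {
				/* copy_nonpresent_pte(dst_mm, src_mm, ...): line 1063 */
				continue;
			}
			/*
			 * Present entry: copy_present_pte().  If the page must
			 * be duplicated right away and nothing is preallocated,
			 * the real loop drops both locks, calls
			 * page_copy_prealloc(src_mm, src_vma, addr) (line 1121)
			 * and restarts.
			 */
		}

		spin_unlock(src_ptl);
		pte_unmap(orig_src_pte);
		pte_unmap_unlock(orig_dst_pte, dst_ptl);
		return 0;
	}
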
1145 struct mm_struct *src_mm = src_vma->vm_mm;
1159 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd,
1182 struct mm_struct *src_mm = src_vma->vm_mm;
1196 err = copy_huge_pud(dst_mm, src_mm,
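
copy_pmd_range() and copy_pud_range() (lines 1145-1196) repeat the same pattern one level up: allocate the level in the child, walk the corresponding entries in the parent, hand huge entries to copy_huge_pmd()/copy_huge_pud() (which again take both dst_mm and src_mm, lines 1159 and 1196), and descend otherwise. A sketch of the PMD level only, kernel context assumed; the huge-PMD delegation and the swap/devmap cases are reduced to a comment, and sketch_copy_pmd_range() is a hypothetical name:

	static int sketch_copy_pmd_range(struct vm_area_struct *dst_vma,
					 struct vm_area_struct *src_vma,
					 pud_t *dst_pud, pud_t *src_pud,
					 unsigned long addr, unsigned long end)
	{
		struct mm_struct *dst_mm = dst_vma->vm_mm;
		pmd_t *src_pmd, *dst_pmd;
		unsigned long next;

		dst_pmd = pmd_alloc(dst_mm, dst_pud, addr);
		if (!dst_pmd)
			return -ENOMEM;
		src_pmd = pmd_offset(src_pud, addr);
		do {
			next = pmd_addr_end(addr, end);
			if (pmd_trans_huge(*src_pmd)) {
				/* copy_huge_pmd(dst_mm, src_mm, dst_pmd,
				 * src_pmd, ...): line 1159; elided here. */
				continue;
			}
			if (pmd_none_or_clear_bad(src_pmd))
				continue;
			/* Otherwise descend to the PTE level (copy_pte_range()). */
		} while (dst_pmd++, src_pmd++, addr = next, addr != end);

		return 0;
	}
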
1277 struct mm_struct *src_mm = src_vma->vm_mm;
1286 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma);
1308 0, src_mm, addr, end);
1318 raw_write_seqcount_begin(&src_mm->write_protect_seq);
1323 src_pgd = pgd_offset(src_mm, addr);
1337 raw_write_seqcount_end(&src_mm->write_protect_seq);
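
copy_page_range() ties the walk together (lines 1277-1337): hugetlb VMAs are delegated wholesale to copy_hugetlb_page_range() with both mms (line 1286), the MMU-notifier range is registered against src_mm (line 1308), and src_mm->write_protect_seq is held across the walk (lines 1318/1337) so that lockless fast-GUP/pinning can detect that parent PTEs are being write-protected underneath it; the walk itself starts from pgd_offset(src_mm, addr) (line 1323). A condensed sketch, kernel context assumed; in the real function the notifier and seqcount parts only run for COW mappings that actually need write-protection, the per-level descent and error handling are reduced to comments, and sketch_copy_page_range() is a hypothetical name:

	static int sketch_copy_page_range(struct vm_area_struct *dst_vma,
					  struct vm_area_struct *src_vma)
	{
		struct mm_struct *dst_mm = dst_vma->vm_mm;
		struct mm_struct *src_mm = src_vma->vm_mm;
		unsigned long addr = src_vma->vm_start;
		unsigned long end = src_vma->vm_end;
		pgd_t *src_pgd, *dst_pgd;
		unsigned long next;

		/* hugetlb mappings have their own copy routine (line 1286). */
		if (is_vm_hugetlb_page(src_vma))
			return copy_hugetlb_page_range(dst_mm, src_mm,
						       dst_vma, src_vma);

		/* mmu_notifier_range_init(..., 0, src_mm, addr, end) and
		 * mmu_notifier_invalidate_range_start() would go here (line 1308). */

		/* Make the upcoming write-protection visible to fast-GUP readers. */
		mmap_assert_write_locked(src_mm);
		raw_write_seqcount_begin(&src_mm->write_protect_seq);

		dst_pgd = pgd_offset(dst_mm, addr);
		src_pgd = pgd_offset(src_mm, addr);	/* line 1323 */
		do {
			next = pgd_addr_end(addr, end);
			if (pgd_none_or_clear_bad(src_pgd))
				continue;
			/* descend: copy_p4d_range() -> ... -> copy_pte_range() */
		} while (dst_pgd++, src_pgd++, addr = next, addr != end);

		raw_write_seqcount_end(&src_mm->write_protect_seq);
		/* mmu_notifier_invalidate_range_end() would go here. */

		return 0;
	}
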