Lines matching refs:src_mm
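(All of the matches below sit in the fork-time page-table copy path, copy_page_range() and its helpers; the line numbers together with the has_pinned and write_protect_seq references are consistent with mm/memory.c from a v5.11-era Linux kernel.)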
700 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,
718 &src_mm->mmlist);
739 set_pte_at(src_mm, addr, src_pte, pte);
770 set_pte_at(src_mm, addr, src_pte, pte);
804 struct mm_struct *src_mm = src_vma->vm_mm;
823 if (likely(!atomic_read(&src_mm->has_pinned)))
873 struct mm_struct *src_mm = src_vma->vm_mm;
897 ptep_set_wrprotect(src_mm, addr, src_pte);
917 page_copy_prealloc(struct mm_struct *src_mm, struct vm_area_struct *vma,
926 if (mem_cgroup_charge(new_page, src_mm, GFP_KERNEL)) {
941 struct mm_struct *src_mm = src_vma->vm_mm;
960 src_ptl = pte_lockptr(src_mm, src_pmd);
982 entry.val = copy_nonpresent_pte(dst_mm, src_mm,
1028 prealloc = page_copy_prealloc(src_mm, src_vma, addr);
1048 struct mm_struct *src_mm = src_vma->vm_mm;
1062 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd,
1085 struct mm_struct *src_mm = src_vma->vm_mm;
1099 err = copy_huge_pud(dst_mm, src_mm,
1148 struct mm_struct *src_mm = src_vma->vm_mm;
1164 return copy_hugetlb_page_range(dst_mm, src_mm, src_vma);
1186 0, src_vma, src_mm, addr, end);
1195 mmap_assert_write_locked(src_mm);
1196 raw_write_seqcount_begin(&src_mm->write_protect_seq);
1201 src_pgd = pgd_offset(src_mm, addr);
1214 raw_write_seqcount_end(&src_mm->write_protect_seq);
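The two write_protect_seq references above (lines 1196 and 1214) bracket the entire page-table walk: copy_page_range() runs under the mmap write lock (the mmap_assert_write_locked() at line 1195) and flips src_mm->write_protect_seq to odd for the duration of the copy, so a lockless fast-GUP reader can detect a concurrent fork and retry. Below is a minimal user-space sketch of that seqcount pattern; everything in it (fake_mm, fork_copy_sketch, read_entry_fast, the single int standing in for a page table) is a hypothetical analogue for illustration, not the kernel's implementation.

/*
 * User-space sketch of a seqcount write section, modeled on the
 * raw_write_seqcount_begin()/_end() pair around the fork-time copy.
 * Even sequence = quiescent; odd = a copy is in progress.
 */
#include <stdatomic.h>
#include <stdio.h>

struct fake_mm {
        atomic_uint write_protect_seq;  /* analogue of mm->write_protect_seq */
        atomic_int entry;               /* stand-in for a page-table entry */
};

/* Writer side: what the fork path does around the page-table copy. */
static void fork_copy_sketch(struct fake_mm *mm)
{
        /* begin: make the sequence odd so lockless readers back off */
        atomic_fetch_add_explicit(&mm->write_protect_seq, 1,
                                  memory_order_relaxed);
        atomic_thread_fence(memory_order_release);

        /* "write-protect" the entry (pretend bit 0 is the WP bit) */
        atomic_fetch_or_explicit(&mm->entry, 1, memory_order_relaxed);

        /* end: sequence becomes even again, publishing the update */
        atomic_thread_fence(memory_order_release);
        atomic_fetch_add_explicit(&mm->write_protect_seq, 1,
                                  memory_order_relaxed);
}

/* Reader side: a fast-GUP-like lockless read that retries on overlap. */
static int read_entry_fast(struct fake_mm *mm)
{
        unsigned int s1, s2;
        int val;

        do {
                s1 = atomic_load_explicit(&mm->write_protect_seq,
                                          memory_order_acquire);
                val = atomic_load_explicit(&mm->entry,
                                           memory_order_relaxed);
                atomic_thread_fence(memory_order_acquire);
                s2 = atomic_load_explicit(&mm->write_protect_seq,
                                          memory_order_relaxed);
        } while ((s1 & 1) || s1 != s2); /* retry if a copy ran or is running */

        return val;
}

int main(void)
{
        struct fake_mm mm = { 0 };

        fork_copy_sketch(&mm);
        printf("entry after copy: %d\n", read_entry_fast(&mm));
        return 0;
}

The same window also covers the ptep_set_wrprotect() at line 897: the source PTE is write-protected inside the write section, so a fast-path reader that raced with it observes an odd or changed sequence and falls back to the locked slow path.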