Lines Matching refs:src_mm

784 copy_nonpresent_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,  in copy_nonpresent_pte()  argument
804 &src_mm->mmlist); in copy_nonpresent_pte()
810 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
832 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
866 set_pte_at(src_mm, addr, src_pte, pte); in copy_nonpresent_pte()
945 struct mm_struct *src_mm = src_vma->vm_mm; in __copy_present_ptes() local
949 wrprotect_ptes(src_mm, addr, src_pte, nr); in __copy_present_ptes()
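
The hits above are the leaf of the fork copy path. copy_nonpresent_pte() needs src_mm explicitly: for a plain swap entry the new mm is linked into the global mmlist next to src_mm (the list_add() hit at 804), and the three set_pte_at(src_mm, ...) hits (810, 832, 866) are the cases where the source entry itself is rewritten, for example a writable migration or device-private entry being downgraded to read-only under a COW mapping. For present PTEs, __copy_present_ptes() write-protects the parent's own entries with wrprotect_ptes(src_mm, ...) (949) so the first write on either side faults and takes the COW path. Below is a minimal user-space sketch of that decision logic, assuming invented stand-in types (model_pte, model_mm) rather than the kernel's pte_t/swp_entry_t helpers:

/* Hypothetical user-space model of the leaf copy step; none of these types
 * or helpers are the kernel's. */
#include <stdbool.h>
#include <stddef.h>

enum kind { PRESENT, SWAP, MIGRATION };

struct model_pte { enum kind kind; bool write; };
struct model_mm  { bool on_mmlist; };

/* Non-present entry: make sure the child mm is reachable via the mmlist,
 * and downgrade writable migration-style entries on the source side when
 * the mapping is copy-on-write. */
static void copy_nonpresent(struct model_mm *dst_mm, struct model_pte *dst,
                            struct model_pte *src, bool cow)
{
    if (src->kind == SWAP)
        dst_mm->on_mmlist = true;   /* list_add(&dst_mm->mmlist, &src_mm->mmlist) analogue */
    else if (src->kind == MIGRATION && cow && src->write)
        src->write = false;         /* set_pte_at(src_mm, ...) analogue */
    *dst = *src;
}

/* Present entries: in a COW mapping the parent's PTEs are write-protected
 * in the same pass that copies them (wrprotect_ptes() analogue). */
static void copy_present(struct model_pte *dst, struct model_pte *src,
                         size_t nr, bool cow)
{
    for (size_t i = 0; i < nr; i++) {
        if (cow)
            src[i].write = false;
        dst[i] = src[i];
    }
}

int main(void)
{
    struct model_mm child = { false };
    struct model_pte src[2] = { { PRESENT, true }, { SWAP, false } }, dst[2];

    copy_present(&dst[0], &src[0], 1, true);
    copy_nonpresent(&child, &dst[1], &src[1], true);
    return dst[0].write;            /* 0: both sides are read-only now */
}

The real code also does rss accounting and carries uffd-wp/soft-dirty bits across; none of that is modelled here.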
1048 static inline struct folio *folio_prealloc(struct mm_struct *src_mm, in folio_prealloc() argument
1061 if (mem_cgroup_charge(new_folio, src_mm, GFP_KERNEL)) { in folio_prealloc()
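
folio_prealloc() takes src_mm only so the freshly allocated destination folio can be charged to the right memory cgroup (the mem_cgroup_charge() hit at 1061); if the charge fails, the folio is released and the caller sees an allocation failure. A user-space sketch of that allocate-then-charge-then-unwind shape, where model_charge() is an invented stand-in for mem_cgroup_charge():

/* Hypothetical allocate/charge/unwind pattern; not the kernel's folio API. */
#include <stdlib.h>
#include <stdbool.h>

struct model_folio { void *mem; };

/* Stand-in for mem_cgroup_charge(); pretend it can fail. */
static bool model_charge(struct model_folio *f) { (void)f; return true; }

struct model_folio *model_folio_prealloc(size_t size)
{
    struct model_folio *f = malloc(sizeof(*f));

    if (!f)
        return NULL;
    f->mem = malloc(size);
    if (!f->mem) {
        free(f);
        return NULL;
    }
    if (!model_charge(f)) {         /* charging the folio failed */
        free(f->mem);               /* folio_put() analogue */
        free(f);
        return NULL;                /* caller treats this as -ENOMEM */
    }
    return f;
}

int main(void)
{
    struct model_folio *f = model_folio_prealloc(4096);

    if (!f)
        return 1;
    free(f->mem);
    free(f);
    return 0;
}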
1076 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pte_range() local
1112 src_pte = pte_offset_map_rw_nolock(src_mm, src_pmd, addr, &dummy_pmdval, in copy_pte_range()
1143 ret = copy_nonpresent_pte(dst_mm, src_mm, in copy_pte_range()
1207 prealloc = folio_prealloc(src_mm, src_vma, addr, false); in copy_pte_range()
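
In copy_pte_range(), src_mm shows up where the source PTE table is mapped (pte_offset_map_rw_nolock() at 1112), where non-present entries are handed to copy_nonpresent_pte() (1143), and in the retry path: when copying a present page reports that it needs a destination folio, the page-table locks are dropped, folio_prealloc(src_mm, ...) runs (1207), and the same address is attempted again. A sketch of just that drop-and-retry control flow; copy_one() and EAGAIN_NEED_PREALLOC are invented placeholders, not kernel names:

/* Hypothetical model of the "drop locks, preallocate, retry" loop that
 * copy_pte_range() uses when a present-page copy needs a new folio. */
#include <stdlib.h>

#define EAGAIN_NEED_PREALLOC 1

struct prealloc { int dummy; };

static int copy_one(unsigned long addr, struct prealloc *p)
{
    (void)addr;
    /* pretend the first attempt at each address needs a preallocated buffer */
    return p ? 0 : EAGAIN_NEED_PREALLOC;
}

int model_copy_range(unsigned long start, unsigned long end, unsigned long step)
{
    struct prealloc *prealloc = NULL;
    unsigned long addr = start;

    while (addr < end) {
        /* (page-table locks would be taken here) */
        int ret = copy_one(addr, prealloc);
        /* (page-table locks would be dropped here) */
        if (ret == EAGAIN_NEED_PREALLOC) {
            prealloc = malloc(sizeof(*prealloc));   /* folio_prealloc(src_mm, ...) analogue */
            if (!prealloc)
                return -1;                          /* -ENOMEM */
            continue;                               /* retry the same address */
        }
        free(prealloc);
        prealloc = NULL;
        addr += step;
    }
    free(prealloc);
    return 0;
}

int main(void)
{
    return model_copy_range(0, 4096, 64) ? 1 : 0;
}

Preallocating outside the locked region is the point of the pattern: the allocation may sleep, which is not allowed while the PTE spinlocks are held.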
1231 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pmd_range() local
1245 err = copy_huge_pmd(dst_mm, src_mm, dst_pmd, src_pmd, in copy_pmd_range()
1268 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pud_range() local
1282 err = copy_huge_pud(dst_mm, src_mm, in copy_pud_range()
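
At the pmd and pud levels (copy_pmd_range(), copy_pud_range()) src_mm is only pulled out of src_vma->vm_mm so it can be forwarded: a huge entry at that level is copied in one shot by copy_huge_pmd() or copy_huge_pud() (1245, 1282), and anything else descends one more level. A simplified model of that "huge entry short-circuits the descent" shape, with illustrative types only:

/* Simplified two-level walk: a "huge" entry at the upper level is copied in
 * one shot, everything else descends to the next level. Not kernel types. */
#include <stdbool.h>
#include <stddef.h>

struct model_entry { bool huge; int leaf[8]; };

static int copy_huge(struct model_entry *dst, const struct model_entry *src)
{
    *dst = *src;                    /* one copy covers the whole range */
    return 0;
}

static int copy_leaf_level(struct model_entry *dst, const struct model_entry *src)
{
    for (size_t i = 0; i < 8; i++)  /* analogous to dropping down to copy_pte_range() */
        dst->leaf[i] = src->leaf[i];
    return 0;
}

int model_copy_level(struct model_entry *dst, const struct model_entry *src)
{
    if (src->huge)
        return copy_huge(dst, src); /* copy_huge_pmd()/copy_huge_pud() analogue */
    return copy_leaf_level(dst, src);
}

int main(void)
{
    struct model_entry src = { .huge = true, .leaf = { 1, 2 } }, dst = { 0 };

    return model_copy_level(&dst, &src);
}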
1362 struct mm_struct *src_mm = src_vma->vm_mm; in copy_page_range() local
1372 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma); in copy_page_range()
1390 0, src_mm, addr, end); in copy_page_range()
1400 raw_write_seqcount_begin(&src_mm->write_protect_seq); in copy_page_range()
1405 src_pgd = pgd_offset(src_mm, addr); in copy_page_range()
1418 raw_write_seqcount_end(&src_mm->write_protect_seq); in copy_page_range()
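
copy_page_range() is the entry point for the whole listing: hugetlb VMAs take their own path (1372), an mmu_notifier range is set up against src_mm (1390), and the walk starting at pgd_offset(src_mm, addr) (1405) is bracketed by raw_write_seqcount_begin()/raw_write_seqcount_end() on src_mm->write_protect_seq (1400, 1418), which lets lockless readers such as GUP-fast notice that fork was write-protecting PTEs underneath them and retry. Below is a user-space sketch of that even/odd counter idea; it uses plain C11 sequentially consistent atomics, whereas the kernel's seqcount API uses finer-grained barriers:

/* Hypothetical even/odd sequence counter mirroring mm->write_protect_seq;
 * C11 atomics stand in for the kernel's seqcount primitives. */
#include <stdatomic.h>
#include <stdbool.h>

static _Atomic unsigned int write_protect_seq;

/* Fork side: the counter is odd for the whole copy, even again afterwards. */
static void model_copy_page_range(void)
{
    atomic_fetch_add(&write_protect_seq, 1);    /* raw_write_seqcount_begin() analogue */
    /* ... walk pgd/pud/pmd/pte, write-protecting and copying entries ... */
    atomic_fetch_add(&write_protect_seq, 1);    /* raw_write_seqcount_end() analogue */
}

/* Lockless reader (GUP-fast analogue): only trust the value it read if the
 * counter was even and did not move across the read. */
static bool model_lockless_read(int *out, const int *pte)
{
    unsigned int seq = atomic_load(&write_protect_seq);

    if (seq & 1)
        return false;                           /* a fork is copying; caller retries */
    *out = *pte;
    return seq == atomic_load(&write_protect_seq);
}

int main(void)
{
    int pte = 42, got = 0;

    model_copy_page_range();
    return model_lockless_read(&got, &pte) && got == 42 ? 0 : 1;
}

In the kernel, a reader that sees an odd or changed count simply falls back to the slower, locked lookup path rather than spinning on the counter.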