Lines matching refs:src_vma (the fork-time page-table copy path; in mainline kernels all of these functions live in mm/memory.c)

786 struct vm_area_struct *src_vma, unsigned long addr, int *rss) in copy_nonpresent_pte() argument
850 folio_try_dup_anon_rmap_pte(folio, page, dst_vma, src_vma); in copy_nonpresent_pte()
875 VM_BUG_ON(!is_cow_mapping(src_vma->vm_flags)); in copy_nonpresent_pte()
876 if (try_restore_exclusive_pte(src_vma, addr, src_pte, orig_pte)) in copy_nonpresent_pte()
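
Context for the copy_nonpresent_pte() hits: this is the fork-path handler for PTEs that hold metadata rather than a present page (swap, migration, and device-exclusive entries). The VM_BUG_ON at line 875 asserts that device-exclusive entries can only appear in CoW mappings, and line 876 tries to turn such an entry back into an ordinary present PTE before copying. A minimal userspace sketch of the dispatch, with made-up names:

    /* Toy model, not kernel code: dispatch on the kind of non-present
     * entry, the way copy_nonpresent_pte() does.  Names are hypothetical. */
    #include <stdio.h>
    #include <stdbool.h>

    enum entry_kind { ENTRY_SWAP, ENTRY_MIGRATION, ENTRY_DEVICE_EXCLUSIVE };

    static int toy_copy_nonpresent(bool vma_is_cow, enum entry_kind kind)
    {
        switch (kind) {
        case ENTRY_SWAP:
            return 0;   /* take a swap-count reference, copy the entry */
        case ENTRY_MIGRATION:
            return 0;   /* copy; writable entries become read-only for CoW */
        case ENTRY_DEVICE_EXCLUSIVE:
            if (!vma_is_cow)
                return -1;   /* cf. the VM_BUG_ON at line 875 */
            return 0;        /* cf. try_restore_exclusive_pte(), line 876 */
        }
        return -1;
    }

    int main(void)
    {
        printf("%d\n", toy_copy_nonpresent(true, ENTRY_DEVICE_EXCLUSIVE));
        return 0;
    }
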
906 copy_present_page(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_page() argument
922 if (copy_mc_user_highpage(&new_folio->page, page, addr, src_vma)) in copy_present_page()
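
copy_mc_user_highpage() at line 922 is the machine-check-safe page copy: unlike a plain copy it can fail partway when the source page carries an uncorrectable memory error, and copy_present_page() must propagate that failure rather than crash. A toy copy primitive with the same contract (the 'poisoned' flag stands in for a machine check):

    /* Toy model: a copy that reports failure instead of crashing when
     * the source is bad.  Not the kernel's implementation. */
    #include <stdio.h>
    #include <string.h>

    static int toy_copy_mc(void *dst, const void *src, size_t n, int poisoned)
    {
        if (poisoned)
            return -1;        /* caller must abort the fork-time copy */
        memcpy(dst, src, n);
        return 0;
    }

    int main(void)
    {
        char a[8] = "parent", b[8];
        printf("%d %d\n", toy_copy_mc(b, a, sizeof(a), 0),
                          toy_copy_mc(b, a, sizeof(a), 1));
        return 0;
    }
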
942 struct vm_area_struct *src_vma, pte_t *dst_pte, pte_t *src_pte, in __copy_present_ptes() argument
945 struct mm_struct *src_mm = src_vma->vm_mm; in __copy_present_ptes()
948 if (is_cow_mapping(src_vma->vm_flags) && pte_write(pte)) { in __copy_present_ptes()
954 if (src_vma->vm_flags & VM_SHARED) in __copy_present_ptes()
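
Lines 948-954 are the core of fork's copy-on-write semantics: when the mapping is CoW and the parent's PTE is writable, both parent and child get write-protected so the first write faults and duplicates the page; shared mappings instead have the child's PTE marked clean. A compilable toy of that flag transformation (the PTE bits and helpers are invented, though is_cow_mapping() mirrors the real test):

    /* Toy model of the two branches visible above.  Bit values invented. */
    #include <stdio.h>

    #define PTE_WRITE   (1u << 0)
    #define PTE_DIRTY   (1u << 1)

    #define VM_SHARED   (1u << 0)
    #define VM_MAYWRITE (1u << 1)

    static int is_cow_mapping(unsigned vm_flags)
    {
        /* private writable mappings are copy-on-write */
        return (vm_flags & (VM_SHARED | VM_MAYWRITE)) == VM_MAYWRITE;
    }

    static unsigned toy_copy_pte(unsigned vm_flags, unsigned *src_pte)
    {
        unsigned pte = *src_pte;

        if (is_cow_mapping(vm_flags) && (pte & PTE_WRITE)) {
            *src_pte &= ~PTE_WRITE;   /* write-protect the parent ... */
            pte &= ~PTE_WRITE;        /* ... and the child copy */
        }
        if (vm_flags & VM_SHARED)
            pte &= ~PTE_DIRTY;        /* child starts clean for shared maps */
        return pte;
    }

    int main(void)
    {
        unsigned src = PTE_WRITE | PTE_DIRTY;
        unsigned child = toy_copy_pte(VM_MAYWRITE, &src);
        printf("parent %#x child %#x\n", src, child);
        return 0;
    }
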
972 copy_present_ptes(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_present_ptes() argument
982 page = vm_normal_page(src_vma, addr, pte); in copy_present_ptes()
994 if (src_vma->vm_flags & VM_SHARED) in copy_present_ptes()
996 if (!vma_soft_dirty_enabled(src_vma)) in copy_present_ptes()
1004 nr, dst_vma, src_vma))) { in copy_present_ptes()
1015 pte = pte_mkwrite(pte, src_vma); in copy_present_ptes()
1016 __copy_present_ptes(dst_vma, src_vma, dst_pte, src_pte, pte, in copy_present_ptes()
1029 if (unlikely(folio_try_dup_anon_rmap_pte(folio, page, dst_vma, src_vma))) { in copy_present_ptes()
1032 err = copy_present_page(dst_vma, src_vma, dst_pte, src_pte, in copy_present_ptes()
1044 __copy_present_ptes(dst_vma, src_vma, dst_pte, src_pte, pte, addr, 1); in copy_present_ptes()
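
folio_try_dup_anon_rmap_pte() at line 1029 can fail when the anonymous page is pinned (for example for DMA): sharing a pinned page CoW-style would let parent and child race over data a device still owns, so line 1032 falls back to copy_present_page() and gives the child its own copy. A toy of that fallback, with hypothetical names:

    /* Toy model: share the page when possible, fall back to a private
     * copy when it is pinned. */
    #include <stdlib.h>
    #include <string.h>
    #include <stdio.h>

    struct toy_page { int pinned; int refs; char data[16]; };

    static int toy_try_dup(struct toy_page *p)
    {
        if (p->pinned)
            return -1;    /* cannot share a pinned page CoW-style */
        p->refs++;
        return 0;
    }

    int main(void)
    {
        struct toy_page parent = { .pinned = 1, .refs = 1, .data = "secret" };
        struct toy_page *child;

        if (toy_try_dup(&parent) == 0) {
            child = &parent;                  /* shared, CoW */
        } else {
            child = malloc(sizeof(*child));   /* cf. copy_present_page() */
            memcpy(child, &parent, sizeof(*child));
            child->pinned = 0;
            child->refs = 1;
        }
        printf("shared=%d\n", child == &parent);
        if (child != &parent)
            free(child);
        return 0;
    }
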
1071 copy_pte_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pte_range() argument
1076 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pte_range()
1145 dst_vma, src_vma, in copy_pte_range()
1167 ret = copy_present_ptes(dst_vma, src_vma, dst_pte, src_pte, in copy_pte_range()
1207 prealloc = folio_prealloc(src_mm, src_vma, addr, false); in copy_pte_range()
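
copy_pte_range() runs with both page-table spinlocks held and therefore must not sleep; when it finds it needs a fresh folio for the fallback above, it backs out, drops the locks, preallocates at line 1207, and loops back in. The same allocate-outside-the-lock-and-retry pattern in miniature, with invented names:

    /* Toy model of allocate-outside-the-lock-and-retry. */
    #include <stdlib.h>
    #include <stdio.h>
    #include <errno.h>

    static int copy_chunk(void **prealloc, int need_copy)
    {
        if (need_copy && !*prealloc)
            return -EAGAIN;   /* "atomic" context: cannot allocate here */
        if (need_copy)
            *prealloc = NULL; /* consumed: handed over in the real code
                               * (intentionally leaked in this toy) */
        return 0;
    }

    int main(void)
    {
        void *prealloc = NULL;
        int ret;

        while ((ret = copy_chunk(&prealloc, 1)) == -EAGAIN) {
            /* "locks" dropped here: safe to sleep in the allocator */
            if (!(prealloc = malloc(64)))
                return 1;
        }
        free(prealloc);       /* may be NULL if it was consumed */
        printf("ret=%d\n", ret);
        return 0;
    }
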
1226 copy_pmd_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pmd_range() argument
1231 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pmd_range()
1244 VM_BUG_ON_VMA(next-addr != HPAGE_PMD_SIZE, src_vma); in copy_pmd_range()
1246 addr, dst_vma, src_vma); in copy_pmd_range()
1255 if (copy_pte_range(dst_vma, src_vma, dst_pmd, src_pmd, in copy_pmd_range()
1263 copy_pud_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_pud_range() argument
1268 struct mm_struct *src_mm = src_vma->vm_mm; in copy_pud_range()
1281 VM_BUG_ON_VMA(next-addr != HPAGE_PUD_SIZE, src_vma); in copy_pud_range()
1283 dst_pud, src_pud, addr, src_vma); in copy_pud_range()
1292 if (copy_pmd_range(dst_vma, src_vma, dst_pud, src_pud, in copy_pud_range()
1300 copy_p4d_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma, in copy_p4d_range() argument
1316 if (copy_pud_range(dst_vma, src_vma, dst_p4d, src_p4d, in copy_p4d_range()
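
copy_p4d_range(), copy_pud_range(), copy_pmd_range(), and copy_pte_range() are one pattern stamped out per level: iterate the source entries covering [addr, end), skip empty ones, allocate the matching destination table, and recurse; the PMD and PUD levels additionally short-circuit huge entries to a dedicated huge-page copier (lines 1244-1246 and 1281-1283). A toy two-level version of the walk (names and sizes made up):

    /* Toy model of the per-level copy loop: two levels instead of five. */
    #include <stdlib.h>
    #include <stdio.h>

    #define ENTRIES 8

    struct toy_pte { int present; int val; };
    struct toy_pmd { struct toy_pte *table[ENTRIES]; };

    static int copy_pte_level(struct toy_pte *dst, const struct toy_pte *src)
    {
        for (int i = 0; i < ENTRIES; i++)
            if (src[i].present)
                dst[i] = src[i];   /* per-PTE logic lives here */
        return 0;
    }

    static int copy_pmd_level(struct toy_pmd *dst, const struct toy_pmd *src)
    {
        for (int i = 0; i < ENTRIES; i++) {
            if (!src->table[i])
                continue;          /* empty entries are skipped */
            dst->table[i] = calloc(ENTRIES, sizeof(struct toy_pte));
            if (!dst->table[i])
                return -1;         /* unwound by the caller in the kernel */
            if (copy_pte_level(dst->table[i], src->table[i]))
                return -1;
        }
        return 0;
    }

    int main(void)
    {
        struct toy_pmd src = { 0 }, dst = { 0 };
        src.table[3] = calloc(ENTRIES, sizeof(struct toy_pte));
        src.table[3][5] = (struct toy_pte){ .present = 1, .val = 42 };
        int ret = copy_pmd_level(&dst, &src);
        printf("%d %d\n", ret, dst.table[3][5].val);
        return 0;
    }
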
1329 vma_needs_copy(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma) in vma_needs_copy() argument
1340 if (src_vma->vm_flags & (VM_PFNMAP | VM_MIXEDMAP)) in vma_needs_copy()
1343 if (src_vma->anon_vma) in vma_needs_copy()
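
vma_needs_copy() decides whether fork must copy this VMA's page tables eagerly at all: VM_PFNMAP/VM_MIXEDMAP mappings (line 1340) have no struct pages to fault back in later, and an existing anon_vma (line 1343) means anonymous pages that need their CoW setup now; current kernels also force a copy when userfaultfd write-protect is armed on the destination. Everything else can be populated lazily from page faults. A toy predicate with the same shape (flag values invented):

    /* Toy model of the vma_needs_copy() decision. */
    #include <stdbool.h>
    #include <stdio.h>

    #define VM_PFNMAP   (1u << 0)
    #define VM_MIXEDMAP (1u << 1)

    struct toy_vma {
        unsigned vm_flags;
        bool uffd_wp;       /* userfaultfd write-protect armed on dst */
        void *anon_vma;     /* non-NULL once anonymous pages exist */
    };

    static bool toy_vma_needs_copy(const struct toy_vma *dst,
                                   const struct toy_vma *src)
    {
        if (dst->uffd_wp)
            return true;    /* wp markers must be copied now */
        if (src->vm_flags & (VM_PFNMAP | VM_MIXEDMAP))
            return true;    /* no struct pages to fault in later */
        if (src->anon_vma)
            return true;    /* anonymous pages need their CoW setup */
        return false;       /* fault in lazily after fork */
    }

    int main(void)
    {
        struct toy_vma src = { .vm_flags = VM_PFNMAP }, dst = { 0 };
        printf("%d\n", toy_vma_needs_copy(&dst, &src));
        return 0;
    }
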
1356 copy_page_range(struct vm_area_struct *dst_vma, struct vm_area_struct *src_vma) in copy_page_range() argument
1359 unsigned long addr = src_vma->vm_start; in copy_page_range()
1360 unsigned long end = src_vma->vm_end; in copy_page_range()
1362 struct mm_struct *src_mm = src_vma->vm_mm; in copy_page_range()
1368 if (!vma_needs_copy(dst_vma, src_vma)) in copy_page_range()
1371 if (is_vm_hugetlb_page(src_vma)) in copy_page_range()
1372 return copy_hugetlb_page_range(dst_mm, src_mm, dst_vma, src_vma); in copy_page_range()
1374 if (unlikely(src_vma->vm_flags & VM_PFNMAP)) { in copy_page_range()
1375 ret = track_pfn_copy(dst_vma, src_vma, &pfn); in copy_page_range()
1386 is_cow = is_cow_mapping(src_vma->vm_flags); in copy_page_range()
1399 vma_assert_write_locked(src_vma); in copy_page_range()
1410 if (unlikely(copy_p4d_range(dst_vma, src_vma, dst_pgd, src_pgd, in copy_page_range()
1421 if (ret && unlikely(src_vma->vm_flags & VM_PFNMAP)) in copy_page_range()
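
Read in order, the copy_page_range() hits give the top-level flow of fork's address-space copy: return early when vma_needs_copy() says the tables can be rebuilt lazily (line 1368), hand hugetlb VMAs to their own copier (lines 1371-1372), track PFN ranges for VM_PFNMAP (line 1375), and, only for CoW mappings, bracket the page-table walk with MMU-notifier invalidation and the source-VMA write-lock assertion (lines 1386-1399) before descending through copy_p4d_range() (line 1410). A stub skeleton of that ordering (every helper stands in for the real kernel call):

    /* Toy skeleton of copy_page_range()'s order of operations. */
    #include <stdio.h>
    #include <stdbool.h>

    static bool vma_needs_copy_stub(void) { return true; }
    static bool is_hugetlb_stub(void)     { return false; }
    static bool is_cow_stub(void)         { return true; }
    static int  copy_levels_stub(void)    { return 0; }

    static int toy_copy_page_range(void)
    {
        int ret;

        if (!vma_needs_copy_stub())
            return 0;              /* populate lazily via faults */
        if (is_hugetlb_stub())
            return 0;              /* the copy_hugetlb_page_range() path */

        bool cow = is_cow_stub();
        if (cow) {
            /* mmu_notifier_range_start() and the write-lock assertion
             * on the source VMA go here in the real code */
        }
        ret = copy_levels_stub();  /* cf. copy_p4d_range() at line 1410 */
        if (cow) {
            /* mmu_notifier_range_end() */
        }
        return ret;
    }

    int main(void)
    {
        printf("%d\n", toy_copy_page_range());
        return 0;
    }
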