Home
last modified time | relevance | path

Searched refs:vm_end (Results 1 – 25 of 240) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10

/linux-6.15/mm/
H A Dnommu.c677 if (vma->vm_end != end) in find_vma_exact()
995 region->vm_end = vma->vm_end = 0; in do_mmap_private()
1127 vma->vm_end = 0; in do_mmap()
1162 vma->vm_end = region->vm_end = addr + len; in do_mmap()
1335 region->vm_top = region->vm_end = new->vm_end = addr; in split_vma()
1357 vma->vm_region->vm_end = vma->vm_end = addr; in split_vma()
1392 vma->vm_end = from; in vmi_shrink_vma()
1454 if (end == vma->vm_end) in do_munmap()
1652 len = vma->vm_end - addr; in __access_remote_vm()
1737 if (addr_end > vma->vm_end) in __copy_remote_vm_str()
[all …]
H A Dvma.c355 WARN_ON_ONCE(vp->vma->vm_end < vp->remove->vm_end); in vma_complete()
446 unmap_vmas(&tlb, mas, vma, vma->vm_start, vma->vm_end, vma->vm_end, in unmap_region()
448 mas_set(mas, vma->vm_end); in unmap_region()
482 new->vm_end = addr; in __split_vma()
519 vma->vm_end = addr; in __split_vma()
862 vmg->end = next->vm_end; in vma_merge_existing_range()
904 vmg->end = next->vm_end; in vma_merge_existing_range()
1023 vmg->end = next->vm_end; in vma_merge_new_range()
1586 if (vma->vm_end > end) { in vma_modify()
1666 VMG_VMA_STATE(vmg, vmi, vma, vma, vma->vm_end, vma->vm_end + delta); in vma_merge_extend()
[all …]
H A Dmsync.c90 fend = fstart + (min(end, vma->vm_end) - start) - 1; in SYSCALL_DEFINE3()
91 start = vma->vm_end; in SYSCALL_DEFINE3()
107 vma = find_vma(mm, vma->vm_end); in SYSCALL_DEFINE3()
H A Dmseal.c114 if (vma->vm_end >= end) in check_mm_seal()
117 nstart = vma->vm_end; in check_mm_seal()
149 tmp = vma->vm_end; in apply_mm_seal()
H A Dmremap.c1012 if (!err && vma->vm_end != old_addr + old_len) in prep_move_vma()
1050 unsigned long vm_end; in unmap_source_vma() local
1082 vm_end = vma->vm_end; in unmap_source_vma()
1127 if (vm_end > end) { in unmap_source_vma()
1213 unsigned long old_end = vrm->vma->vm_end; in dontunmap_complete()
1315 if (old_len > vma->vm_end - addr) in resize_is_valid()
1448 unsigned long end = vma->vm_end + delta; in vma_expandable()
1450 if (end < vma->vm_end) /* overflow */ in vma_expandable()
1452 if (find_vma_intersection(vma->vm_mm, vma->vm_end, end)) in vma_expandable()
1464 unsigned long suffix_bytes = vrm->vma->vm_end - vrm->addr; in vrm_can_expand_in_place()
[all …]
H A Dvma.h209 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_gfp()
433 vma->vm_end, vmi->mas.index, vmi->mas.last); in vma_iter_store_overwrite()
438 vmi->mas.last, vma->vm_start, vma->vm_start, vma->vm_end, in vma_iter_store_overwrite()
447 __mas_set_range(&vmi->mas, vma->vm_start, vma->vm_end - 1); in vma_iter_store_overwrite()
H A Dpagewalk.c490 next = min(end, vma->vm_end); in walk_page_range_mm()
491 vma = find_vma(mm, vma->vm_end); in walk_page_range_mm()
664 if (start < vma->vm_start || end > vma->vm_end) in walk_page_range_vma()
691 return __walk_page_range(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
759 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_mapping()
846 if (WARN_ON_ONCE(addr < vma->vm_start || addr >= vma->vm_end)) in folio_walk_start()
H A Dmmap.c979 populate_vma_page_range(prev, addr, prev->vm_end, NULL); in find_extend_vma_locked()
1173 if (start + size > vma->vm_end) { in SYSCALL_DEFINE5()
1174 VMA_ITERATOR(vmi, mm, vma->vm_end); in SYSCALL_DEFINE5()
1179 if (next->vm_start != prev->vm_end) in SYSCALL_DEFINE5()
1188 if (start + size <= next->vm_end) in SYSCALL_DEFINE5()
1294 vma_iter_set(&vmi, vma->vm_end); in exit_mmap()
1304 vma_iter_set(&vmi, vma->vm_end); in exit_mmap()
1333 if (find_vma_intersection(mm, vma->vm_start, vma->vm_end)) in insert_vm_struct()
1743 unsigned long old_end = vma->vm_end; in relocate_vma_down()
H A Duserfaultfd.c27 if (dst_end > dst_vma->vm_end) in validate_dst_vma()
907 VM_WARN_ONCE(start < dst_vma->vm_start || start + len > dst_vma->vm_end, in uffd_wp_range()
977 _end = min(dst_vma->vm_end, end); in mwriteprotect_range()
1514 if (src_start >= vma->vm_start && src_start < vma->vm_end) in find_vmas_mm_locked()
1544 if (src_start >= vma->vm_start && src_start < vma->vm_end) { in uffd_move_lock()
1739 if (src_start + len > src_vma->vm_end) in move_pages()
1744 if (dst_start + len > dst_vma->vm_end) in move_pages()
1919 if (start == vma->vm_start && end == vma->vm_end) in userfaultfd_clear_vma()
1974 vma_end = min(end, vma->vm_end); in userfaultfd_register_range()
1996 start = vma->vm_end; in userfaultfd_register_range()
[all …]
/linux-6.15/tools/testing/vma/
H A Dvma.c74 ret->vm_end = end; in alloc_vma()
292 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_merge()
323 ASSERT_EQ(vma->vm_end, 0x2000); in test_simple_modify()
335 ASSERT_EQ(vma->vm_end, 0x1000); in test_simple_modify()
344 ASSERT_EQ(vma->vm_end, 0x2000); in test_simple_modify()
353 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_modify()
381 ASSERT_EQ(vma->vm_end, 0x3000); in test_simple_expand()
402 ASSERT_EQ(vma->vm_end, 0x1000); in test_simple_shrink()
485 ASSERT_EQ(vma->vm_end, 0x4000); in test_merge_new()
502 ASSERT_EQ(vma->vm_end, 0x5000); in test_merge_new()
[all …]
H A Dvma_internal.h245 unsigned long vm_end; member
584 vma->vm_end = end; in vma_set_range()
688 return (vma->vm_end - vma->vm_start) >> PAGE_SHIFT; in vma_pages()
1113 unsigned long vm_end = vma->vm_end; in vm_end_gap() local
1116 vm_end += stack_guard_gap; in vm_end_gap()
1117 if (vm_end < vma->vm_end) in vm_end_gap()
1118 vm_end = -PAGE_SIZE; in vm_end_gap()
1120 return vm_end; in vm_end_gap()
/linux-6.15/tools/testing/selftests/bpf/progs/
H A Diters_task_vma.c14 __u64 vm_end; member
35 vm_ranges[seen].vm_end = vma->vm_end; in iter_task_vma_for_each()
/linux-6.15/mm/damon/tests/
H A Dvaddr-kunit.h28 mas_set_range(&mas, vmas[i].vm_start, vmas[i].vm_end - 1); in __link_vmas()
72 (struct vm_area_struct) {.vm_start = 10, .vm_end = 20}, in damon_test_three_regions_in_vmas()
73 (struct vm_area_struct) {.vm_start = 20, .vm_end = 25}, in damon_test_three_regions_in_vmas()
74 (struct vm_area_struct) {.vm_start = 200, .vm_end = 210}, in damon_test_three_regions_in_vmas()
75 (struct vm_area_struct) {.vm_start = 210, .vm_end = 220}, in damon_test_three_regions_in_vmas()
76 (struct vm_area_struct) {.vm_start = 300, .vm_end = 305}, in damon_test_three_regions_in_vmas()
77 (struct vm_area_struct) {.vm_start = 307, .vm_end = 330}, in damon_test_three_regions_in_vmas()
/linux-6.15/fs/proc/
H A Dtask_nommu.c35 size += region->vm_end - region->vm_start; in task_mem()
37 size = vma->vm_end - vma->vm_start; in task_mem()
46 slack = region->vm_end - vma->vm_end; in task_mem()
89 vsize += vma->vm_end - vma->vm_start; in task_vsize()
109 size += region->vm_end - region->vm_start; in task_statm()
150 vma->vm_end, in nommu_vma_show()
/linux-6.15/include/trace/events/
H A Dmmap.h80 __field(unsigned long, vm_end)
87 __entry->vm_end = vma->vm_end - 1;
93 (unsigned long) __entry->vm_end
H A Dfs_dax.h17 __field(unsigned long, vm_end)
30 __entry->vm_end = vmf->vma->vm_end;
47 __entry->vm_end,
/linux-6.15/arch/powerpc/include/asm/
H A Dvideo.h8 unsigned long vm_start, unsigned long vm_end, in pgprot_framebuffer() argument
11 return __phys_mem_access_prot(PHYS_PFN(offset), vm_end - vm_start, prot); in pgprot_framebuffer()
/linux-6.15/scripts/coccinelle/api/
H A Dvma_pages.cocci22 * (vma->vm_end - vma->vm_start) >> PAGE_SHIFT
32 - ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT)
44 (vma->vm_end@p - vma->vm_start) >> PAGE_SHIFT
/linux-6.15/arch/x86/um/
H A Dmem_32.c18 gate_vma.vm_end = FIXADDR_USER_END; in gate_vma_init()
49 return (addr >= vma->vm_start) && (addr < vma->vm_end); in in_gate_area()
/linux-6.15/drivers/gpu/drm/virtio/
H A Dvirtgpu_vram.c39 unsigned long vm_size = vma->vm_end - vma->vm_start; in virtio_gpu_vram_mmap()
40 unsigned long vm_end; in virtio_gpu_vram_mmap() local
60 if (check_add_overflow(vma->vm_pgoff << PAGE_SHIFT, vm_size, &vm_end)) in virtio_gpu_vram_mmap()
63 if (vm_end > vram->vram_node.size) in virtio_gpu_vram_mmap()
/linux-6.15/drivers/media/common/videobuf2/
H A Dvideobuf2-memops.c96 vma->vm_end); in vb2_common_vm_open()
114 vma->vm_end); in vb2_common_vm_close()
/linux-6.15/drivers/soc/qcom/
H A Drmtfs_mem.c136 if (vma->vm_end - vma->vm_start > rmtfs_mem->size) { in qcom_rmtfs_mem_mmap()
139 vma->vm_end, vma->vm_start, in qcom_rmtfs_mem_mmap()
140 (vma->vm_end - vma->vm_start), &rmtfs_mem->size); in qcom_rmtfs_mem_mmap()
148 vma->vm_end - vma->vm_start, in qcom_rmtfs_mem_mmap()
/linux-6.15/arch/x86/include/asm/
H A Dvideo.h12 unsigned long vm_start, unsigned long vm_end,
/linux-6.15/arch/m68k/include/asm/
H A Dvideo.h9 unsigned long vm_start, unsigned long vm_end, in pgprot_framebuffer() argument
/linux-6.15/arch/powerpc/kvm/
H A Dbook3s_hv_uvmem.c416 ret = ksm_madvise(vma, vma->vm_start, vma->vm_end, in kvmppc_memslot_page_merge()
423 start = vma->vm_end; in kvmppc_memslot_page_merge()
424 } while (end > vma->vm_end); in kvmppc_memslot_page_merge()
628 if (!vma || addr >= vma->vm_end) { in kvmppc_uvmem_drop_pages()
812 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_uv_migrate_mem_slot()
972 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_h_svm_page_in()
1072 if (!vma || vma->vm_start > start || vma->vm_end < end) in kvmppc_h_svm_page_out()

Pages: 1 2 3 4 5 6 7 8 9 10