/linux-6.15/drivers/gpu/drm/xe/xe_hmm.c
     34  if (mmu_interval_read_retry(range->notifier, range->notifier_seq)) {   in xe_alloc_sg()
    234  unsigned long notifier_seq;   in xe_hmm_userptr_populate_range() (local)
    246  notifier_seq = mmu_interval_read_begin(&userptr->notifier);   in xe_hmm_userptr_populate_range()
    247  if (notifier_seq == userptr->notifier_seq)   in xe_hmm_userptr_populate_range()
    269  hmm_range.notifier_seq = mmu_interval_read_begin(&userptr->notifier);   in xe_hmm_userptr_populate_range()
    301  if (mmu_interval_read_retry(hmm_range.notifier, hmm_range.notifier_seq)) {   in xe_hmm_userptr_populate_range()
    313  userptr->notifier_seq = hmm_range.notifier_seq;   in xe_hmm_userptr_populate_range()
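Taken together, the xe_hmm.c hits trace the standard mmu_interval notifier handshake for a userptr range: sample the sequence with mmu_interval_read_begin(), skip the work if it still matches the stored notifier_seq, otherwise fault the range with hmm_range_fault(), re-check with mmu_interval_read_retry(), and only then store the new sequence. A minimal sketch of that flow, using hypothetical types and helpers (struct my_userptr, my_populate_range()) rather than xe's actual structures:

#include <linux/hmm.h>
#include <linux/mm.h>
#include <linux/mmu_notifier.h>

struct my_userptr {
	struct mmu_interval_notifier notifier;
	unsigned long notifier_seq;	/* sequence the pages were last populated at */
};

/* Caller is assumed to have set up range->start/end/hmm_pfns already. */
static int my_populate_range(struct my_userptr *up, struct hmm_range *range,
			     struct mm_struct *mm)
{
	int ret;

	/* Fast path: nothing has invalidated the range since the last fill. */
	if (mmu_interval_read_begin(&up->notifier) == up->notifier_seq)
		return 0;

again:
	range->notifier = &up->notifier;
	range->notifier_seq = mmu_interval_read_begin(&up->notifier);

	mmap_read_lock(mm);
	ret = hmm_range_fault(range);
	mmap_read_unlock(mm);
	if (ret == -EBUSY)
		goto again;
	if (ret)
		return ret;

	/*
	 * A real driver re-checks under the lock that serialises against its
	 * invalidation callback; kept lock-free here for brevity.
	 */
	if (mmu_interval_read_retry(&up->notifier, range->notifier_seq))
		goto again;

	up->notifier_seq = range->notifier_seq;	/* remember what we populated against */
	return 0;
}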
/linux-6.15/drivers/gpu/drm/xe/xe_vm_types.h
     64  unsigned long notifier_seq;   (struct member)
/linux-6.15/drivers/gpu/drm/xe/xe_pt.c
   1337  unsigned long notifier_seq;   in vma_check_userptr() (local)
   1348  notifier_seq = uvma->userptr.notifier_seq;   in vma_check_userptr()
   1351  notifier_seq))   in vma_check_userptr()
/linux-6.15/drivers/gpu/drm/xe/xe_vm.c
     65  uvma->userptr.notifier_seq) ?   in xe_vma_userptr_check_repin()
    685  uvma->userptr.notifier_seq))   in xe_vma_userptr_force_invalidate()
    686  uvma->userptr.notifier_seq -= 2;   in xe_vma_userptr_force_invalidate()
   1236  userptr->notifier_seq = LONG_MAX;   in xe_vma_create()
   3631  to_userptr_vma(vma)->userptr.notifier_seq));   in xe_vm_invalidate_vma()
/linux-6.15/drivers/gpu/drm/xe/xe_svm.c
     70  (r__)->base.notifier_seq, \
/linux-6.15/drivers/gpu/drm/i915/gem/i915_gem_userptr.c
    238  unsigned long notifier_seq;   in i915_gem_object_userptr_submit_init() (local)
    244  notifier_seq = mmu_interval_read_begin(&obj->userptr.notifier);   in i915_gem_object_userptr_submit_init()
    250  if (notifier_seq == obj->userptr.notifier_seq && obj->userptr.pvec) {   in i915_gem_object_userptr_submit_init()
    283  !obj->userptr.page_ref ? notifier_seq :   in i915_gem_object_userptr_submit_init()
    284  obj->userptr.notifier_seq)) {   in i915_gem_object_userptr_submit_init()
    291  obj->userptr.notifier_seq = notifier_seq;   in i915_gem_object_userptr_submit_init()
    313  obj->userptr.notifier_seq)) {   in i915_gem_object_userptr_submit_done()
    532  obj->userptr.notifier_seq = ULONG_MAX;   in i915_gem_userptr_ioctl()
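The i915 hits show a two-phase variant of the same check: submit_init samples the sequence, reuses the cached pages if it still matches obj->userptr.notifier_seq, and stores the new value after re-acquiring them, while submit_done re-checks the stored sequence just before submission. A hedged sketch of that shape with a hypothetical object type (struct my_obj, my_submit_init()/my_submit_done()), not i915's actual structures:

#include <linux/errno.h>
#include <linux/mmu_notifier.h>
#include <linux/types.h>

struct my_obj {
	struct mmu_interval_notifier notifier;
	unsigned long notifier_seq;	/* seq the current page set was built at */
	bool have_pages;
};

/* Phase 1: before building the request, (re)acquire pages if stale. */
static int my_submit_init(struct my_obj *obj)
{
	unsigned long seq = mmu_interval_read_begin(&obj->notifier);

	if (seq == obj->notifier_seq && obj->have_pages)
		return 0;		/* still valid, reuse cached pages */

	/* ... re-pin / re-fault the user pages here ... */
	obj->have_pages = true;
	obj->notifier_seq = seq;
	return 0;
}

/* Phase 2: just before submission, bail out if an invalidation raced in. */
static int my_submit_done(struct my_obj *obj)
{
	if (mmu_interval_read_retry(&obj->notifier, obj->notifier_seq))
		return -EAGAIN;		/* caller restarts from phase 1 */
	return 0;
}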
/linux-6.15/drivers/gpu/drm/i915/gem/i915_gem_object_types.h
    716  unsigned long notifier_seq;   (struct member)
/linux-6.15/drivers/gpu/drm/amd/amdgpu/amdgpu_hmm.c
    208  hmm_range->notifier_seq = mmu_interval_read_begin(notifier);   in amdgpu_hmm_range_get_pages()
    252  hmm_range->notifier_seq);   in amdgpu_hmm_range_get_pages_done()
/linux-6.15/include/linux/hmm.h
     93  unsigned long notifier_seq;   (struct member)
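notifier_seq here is the member of struct hmm_range that carries the sequence number from mmu_interval_read_begin() into hmm_range_fault(). For orientation, an abridged sketch of the structure (field comments paraphrased; include/linux/hmm.h is authoritative):

struct hmm_range {
	struct mmu_interval_notifier *notifier;  /* notifier covering the range */
	unsigned long notifier_seq;              /* seq from mmu_interval_read_begin() */
	unsigned long start;                     /* range start (inclusive) */
	unsigned long end;                       /* range end (exclusive) */
	unsigned long *hmm_pfns;                 /* per-page result array */
	unsigned long default_flags;             /* default request flags */
	unsigned long pfn_flags_mask;            /* mask applied to per-pfn flags */
	void *dev_private_owner;                 /* owner of device-private pages */
};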
/linux-6.15/drivers/gpu/drm/drm_gpusvm.c
    814  range->notifier_seq = LONG_MAX;   in drm_gpusvm_range_alloc()
    855  hmm_range.notifier_seq = mmu_interval_read_begin(&notifier->notifier);   in drm_gpusvm_check_pages()
    864  hmm_range.notifier_seq =   in drm_gpusvm_check_pages()
   1348  hmm_range.notifier_seq = mmu_interval_read_begin(notifier);   in drm_gpusvm_range_get_pages()
   1371  hmm_range.notifier_seq =   in drm_gpusvm_range_get_pages()
   1397  if (mmu_interval_read_retry(notifier, hmm_range.notifier_seq)) {   in drm_gpusvm_range_get_pages()
   1501  range->notifier_seq = hmm_range.notifier_seq;   in drm_gpusvm_range_get_pages()
   2135  hmm_range.notifier_seq = mmu_interval_read_begin(notifier);   in drm_gpusvm_range_evict()
/linux-6.15/Documentation/translations/zh_CN/mm/hmm.rst
    148  range.notifier_seq = mmu_interval_read_begin(&interval_sub);
    160  if (mmu_interval_read_retry(&ni, range.notifier_seq) {
/linux-6.15/drivers/gpu/drm/nouveau/nouveau_svm.c
    596  unsigned long notifier_seq;   in nouveau_atomic_range_fault() (local)
    611  notifier_seq = mmu_interval_read_begin(&notifier->notifier);   in nouveau_atomic_range_fault()
    623  notifier_seq))   in nouveau_atomic_range_fault()
    686  range.notifier_seq = mmu_interval_read_begin(range.notifier);   in nouveau_range_fault()
    698  range.notifier_seq)) {   in nouveau_range_fault()
/linux-6.15/include/drm/drm_gpusvm.h
    238  unsigned long notifier_seq;   (struct member)
/linux-6.15/lib/test_hmm.c
    302  range->notifier_seq = mmu_interval_read_begin(range->notifier);   in dmirror_range_fault()
    314  range->notifier_seq)) {   in dmirror_range_fault()
   1109  range->notifier_seq = mmu_interval_read_begin(range->notifier);   in dmirror_range_snapshot()
   1122  range->notifier_seq)) {   in dmirror_range_snapshot()
/linux-6.15/drivers/infiniband/core/umem_odp.c
    396  current_seq = range.notifier_seq =   in ib_umem_odp_map_dma_and_lock()
/linux-6.15/drivers/accel/amdxdna/aie2_ctx.c
    779  range.notifier_seq = mmu_interval_read_begin(&abo->mem.notifier);   in aie2_populate_range()
    796  if (mmu_interval_read_retry(&abo->mem.notifier, range.notifier_seq)) {   in aie2_populate_range()
/linux-6.15/mm/hmm.c
    596  range->notifier_seq))   in hmm_range_fault()
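The single hit in mm/hmm.c is the consumer side of the handshake: inside its walk loop, hmm_range_fault() compares the caller-supplied notifier_seq against the interval notifier and bails out with -EBUSY when the range has been invalidated underneath it, which is what forces the goto again retries in the callers above. A rough sketch of that loop shape (only the notifier_seq handling is shown; walk_one_pass() is a hypothetical stand-in for the real page-table walk):

#include <linux/hmm.h>
#include <linux/mmu_notifier.h>

static int hmm_range_fault_sketch(struct hmm_range *range)
{
	int ret;

	do {
		/*
		 * Range invalidated since mmu_interval_read_begin(): let the
		 * caller restart with a fresh sequence number.
		 */
		if (mmu_interval_check_retry(range->notifier,
					     range->notifier_seq))
			return -EBUSY;
		ret = walk_one_pass(range);	/* hypothetical helper */
	} while (ret == -EBUSY);

	return ret;
}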
/linux-6.15/Documentation/mm/hmm.rst
    182  range.notifier_seq = mmu_interval_read_begin(&interval_sub);
    194  if (mmu_interval_read_retry(&ni, range.notifier_seq) {
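Both hmm.rst hits sit inside the documented usage example that the drivers above follow: sample the sequence with mmu_interval_read_begin(), fault the range, then re-check with mmu_interval_read_retry() under the driver's update lock before committing the result. A consolidated paraphrase of that example (interval_sub, driver->update and take_lock()/release_lock() are the documentation's placeholder names; this is paraphrased, not quoted verbatim):

again:
	range.notifier_seq = mmu_interval_read_begin(&interval_sub);
	mmap_read_lock(mm);
	ret = hmm_range_fault(&range);
	mmap_read_unlock(mm);
	if (ret) {
		if (ret == -EBUSY)
			goto again;
		return ret;
	}

	take_lock(driver->update);
	if (mmu_interval_read_retry(&interval_sub, range.notifier_seq)) {
		release_lock(driver->update);
		goto again;
	}

	/*
	 * Use the hmm_pfns result to update the device page tables,
	 * while still holding driver->update.
	 */
	release_lock(driver->update);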