Lines Matching refs: object (FreeBSD sys/vm/vm_map.c)

141     vm_object_t object, vm_pindex_t pindex, vm_size_t size, int flags);
152 ((e)->object.vm_object != NULL && (e)->object.vm_object->cred != NULL && \
541 vm_object_t object; in vm_map_entry_set_vnode_text() local
549 object = entry->object.vm_object; in vm_map_entry_set_vnode_text()
550 KASSERT(object != NULL, ("No object for text, entry %p", entry)); in vm_map_entry_set_vnode_text()
551 if ((object->flags & OBJ_ANON) != 0) in vm_map_entry_set_vnode_text()
552 object = object->handle; in vm_map_entry_set_vnode_text()
554 KASSERT(object->backing_object == NULL, in vm_map_entry_set_vnode_text()
555 ("non-anon object %p shadows", object)); in vm_map_entry_set_vnode_text()
556 KASSERT(object != NULL, ("No content object for text, entry %p obj %p", in vm_map_entry_set_vnode_text()
557 entry, entry->object.vm_object)); in vm_map_entry_set_vnode_text()
566 if (object->type == OBJT_DEAD) { in vm_map_entry_set_vnode_text()
571 } else if (object->type == OBJT_VNODE) { in vm_map_entry_set_vnode_text()
572 vp = object->handle; in vm_map_entry_set_vnode_text()
573 } else if (object->type == OBJT_SWAP) { in vm_map_entry_set_vnode_text()
574 KASSERT((object->flags & OBJ_TMPFS_NODE) != 0, in vm_map_entry_set_vnode_text()
576 "entry %p, object %p, add %d", entry, object, add)); in vm_map_entry_set_vnode_text()
582 VM_OBJECT_RLOCK(object); in vm_map_entry_set_vnode_text()
583 if ((object->flags & OBJ_TMPFS) != 0) { in vm_map_entry_set_vnode_text()
584 vp = object->un_pager.swp.swp_tmpfs; in vm_map_entry_set_vnode_text()
590 VM_OBJECT_RUNLOCK(object); in vm_map_entry_set_vnode_text()
594 "entry %p, object %p, add %d", entry, object, add)); in vm_map_entry_set_vnode_text()
620 vm_object_t object; in vm_map_process_deferred() local
637 object = entry->object.vm_object; in vm_map_process_deferred()
638 KASSERT(object != NULL, ("No object for writecount")); in vm_map_process_deferred()
639 vm_pager_release_writecount(object, entry->start, in vm_map_process_deferred()
1647 vm_map_insert(vm_map_t map, vm_object_t object, vm_ooffset_t offset, in vm_map_insert() argument
1658 KASSERT(object != kernel_object || in vm_map_insert()
1661 KASSERT(object == NULL || (cow & MAP_NOFAULT) == 0 || in vm_map_insert()
1664 object, cow)); in vm_map_insert()
1692 if ((cow & MAP_CREATE_GUARD) != 0 && (object != NULL || in vm_map_insert()
1739 ((protoeflags & MAP_ENTRY_NEEDS_COPY) || object == NULL))) { in vm_map_insert()
1742 KASSERT(object == NULL || in vm_map_insert()
1744 object->cred == NULL, in vm_map_insert()
1745 ("overcommit: vm_map_insert o %p", object)); in vm_map_insert()
1753 if (object != NULL) { in vm_map_insert()
1761 if ((object->flags & OBJ_ANON) != 0) { in vm_map_insert()
1762 VM_OBJECT_WLOCK(object); in vm_map_insert()
1763 if (object->ref_count > 1 || object->shadow_count != 0) in vm_map_insert()
1764 vm_object_clear_flag(object, OBJ_ONEMAPPING); in vm_map_insert()
1765 VM_OBJECT_WUNLOCK(object); in vm_map_insert()
1772 (prev_entry->object.vm_object != NULL && in vm_map_insert()
1773 prev_entry->object.vm_object->cred == cred)) && in vm_map_insert()
1774 vm_object_coalesce(prev_entry->object.vm_object, in vm_map_insert()
1805 object = prev_entry->object.vm_object; in vm_map_insert()
1808 vm_object_reference(object); in vm_map_insert()
1809 if (cred != NULL && object != NULL && object->cred != NULL && in vm_map_insert()
1827 new_entry->object.vm_object = object; in vm_map_insert()
1859 vm_map_pmap_enter(map, start, prot, object, OFF_TO_IDX(offset), in vm_map_insert()
1976 vm_map_fixed(vm_map_t map, vm_object_t object, vm_ooffset_t offset, in vm_map_fixed() argument
1985 object == NULL, in vm_map_fixed()
1998 result = vm_map_insert(map, object, offset, start, end, in vm_map_fixed()
2047 vm_map_alignspace(vm_map_t map, vm_object_t object, vm_ooffset_t offset, in vm_map_alignspace() argument
2064 pmap_align_superpage(object, offset, addr, length); in vm_map_alignspace()
2124 vm_map_find(vm_map_t map, vm_object_t object, vm_ooffset_t offset, in vm_map_find() argument
2134 object == NULL, in vm_map_find()
2138 if (find_space == VMFS_OPTIMAL_SPACE && (object == NULL || in vm_map_find()
2139 (object->flags & OBJ_COLORED) == 0)) in vm_map_find()
2149 find_space != VMFS_NO_SPACE && object == NULL && in vm_map_find()
2235 (rv = vm_map_alignspace(map, object, offset, addr, length, in vm_map_find()
2259 rv = vm_map_insert(map, object, offset, *addr, *addr + length, in vm_map_find()
2282 vm_map_find_min(vm_map_t map, vm_object_t object, vm_ooffset_t offset, in vm_map_find_min() argument
2292 rv = vm_map_find(map, object, offset, addr, length, max_addr, in vm_map_find_min()
2316 prev->object.vm_object == entry->object.vm_object && in vm_map_mergeable_neighbors()
2317 (prev->object.vm_object == NULL || in vm_map_mergeable_neighbors()
2341 if (entry->object.vm_object != NULL) in vm_map_merged_neighbor_dispose()
2342 vm_object_deallocate(entry->object.vm_object); in vm_map_merged_neighbor_dispose()
2378 vm_object_t object; in vm_map_entry_back() local
2380 KASSERT(entry->object.vm_object == NULL, in vm_map_entry_back()
2384 object = vm_object_allocate_anon(atop(entry->end - entry->start), NULL, in vm_map_entry_back()
2386 entry->object.vm_object = object; in vm_map_entry_back()
2404 if (entry->object.vm_object == NULL && !map->system_map && in vm_map_entry_charge_object()
2407 else if (entry->object.vm_object != NULL && in vm_map_entry_charge_object()
2410 VM_OBJECT_WLOCK(entry->object.vm_object); in vm_map_entry_charge_object()
2411 KASSERT(entry->object.vm_object->cred == NULL, in vm_map_entry_charge_object()
2413 entry->object.vm_object->cred = entry->cred; in vm_map_entry_charge_object()
2414 entry->object.vm_object->charge = entry->end - entry->start; in vm_map_entry_charge_object()
2415 VM_OBJECT_WUNLOCK(entry->object.vm_object); in vm_map_entry_charge_object()
2444 vm_object_reference(new_entry->object.vm_object); in vm_map_entry_clone()
2612 entry->object.vm_object == NULL) { in vm_map_submap()
2619 entry->object.sub_map = submap; in vm_map_submap()
2653 vm_object_t object, vm_pindex_t pindex, vm_size_t size, int flags) in vm_map_pmap_enter() argument
2659 if ((prot & (VM_PROT_READ | VM_PROT_EXECUTE)) == 0 || object == NULL) in vm_map_pmap_enter()
2661 if (object->type == OBJT_DEVICE || object->type == OBJT_SG) { in vm_map_pmap_enter()
2662 VM_OBJECT_WLOCK(object); in vm_map_pmap_enter()
2663 if (object->type == OBJT_DEVICE || object->type == OBJT_SG) { in vm_map_pmap_enter()
2664 pmap_object_init_pt(map->pmap, addr, object, pindex, in vm_map_pmap_enter()
2666 VM_OBJECT_WUNLOCK(object); in vm_map_pmap_enter()
2669 VM_OBJECT_LOCK_DOWNGRADE(object); in vm_map_pmap_enter()
2671 VM_OBJECT_RLOCK(object); in vm_map_pmap_enter()
2674 if (psize + pindex > object->size) { in vm_map_pmap_enter()
2675 if (pindex >= object->size) { in vm_map_pmap_enter()
2676 VM_OBJECT_RUNLOCK(object); in vm_map_pmap_enter()
2679 psize = object->size - pindex; in vm_map_pmap_enter()
2686 p = vm_page_find_least(object, pindex); in vm_map_pmap_enter()
2730 VM_OBJECT_RUNLOCK(object); in vm_map_pmap_enter()
2846 obj = entry->object.vm_object; in vm_map_protect()
3078 entry->object.vm_object != NULL && in vm_map_madvise()
3079 entry->object.vm_object->backing_object != NULL) in vm_map_madvise()
3113 vm_object_madvise(entry->object.vm_object, pstart, in vm_map_madvise()
3126 entry->object.vm_object, in vm_map_madvise()
3435 vm_object_unwire(entry->object.vm_object, entry->offset, in vm_map_wire_entry_failure()
3723 vm_object_t object; in vm_map_sync() local
3783 smap = entry->object.sub_map; in vm_map_sync()
3789 object = tentry->object.vm_object; in vm_map_sync()
3793 object = entry->object.vm_object; in vm_map_sync()
3795 vm_object_reference(object); in vm_map_sync()
3798 if (!vm_object_sync(object, offset, size, syncio, invalidate)) in vm_map_sync()
3801 vm_object_deallocate(object); in vm_map_sync()
3833 vm_object_unwire(entry->object.vm_object, entry->offset, size, in vm_map_entry_unwire()
3843 vm_object_deallocate(entry->object.vm_object); in vm_map_entry_deallocate()
3855 vm_object_t object; in vm_map_entry_delete() local
3860 object = entry->object.vm_object; in vm_map_entry_delete()
3865 MPASS(object == NULL); in vm_map_entry_delete()
3878 if ((entry->eflags & MAP_ENTRY_IS_SUB_MAP) != 0 || object == NULL) { in vm_map_entry_delete()
3879 entry->object.vm_object = NULL; in vm_map_entry_delete()
3880 } else if ((object->flags & OBJ_ANON) != 0 || in vm_map_entry_delete()
3881 object == kernel_object) { in vm_map_entry_delete()
3882 KASSERT(entry->cred == NULL || object->cred == NULL || in vm_map_entry_delete()
3887 VM_OBJECT_WLOCK(object); in vm_map_entry_delete()
3888 if (object->ref_count != 1 && in vm_map_entry_delete()
3889 ((object->flags & OBJ_ONEMAPPING) != 0 || in vm_map_entry_delete()
3890 object == kernel_object)) { in vm_map_entry_delete()
3891 vm_object_collapse(object); in vm_map_entry_delete()
3899 vm_object_page_remove(object, offidxstart, offidxend, in vm_map_entry_delete()
3901 if (offidxend >= object->size && in vm_map_entry_delete()
3902 offidxstart < object->size) { in vm_map_entry_delete()
3903 size1 = object->size; in vm_map_entry_delete()
3904 object->size = offidxstart; in vm_map_entry_delete()
3905 if (object->cred != NULL) { in vm_map_entry_delete()
3906 size1 -= object->size; in vm_map_entry_delete()
3907 KASSERT(object->charge >= ptoa(size1), in vm_map_entry_delete()
3908 ("object %p charge < 0", object)); in vm_map_entry_delete()
3910 object->cred); in vm_map_entry_delete()
3911 object->charge -= ptoa(size1); in vm_map_entry_delete()
3915 VM_OBJECT_WUNLOCK(object); in vm_map_entry_delete()
4001 entry->object.vm_object != NULL) in vm_map_delete()
4095 src_object = src_entry->object.vm_object; in vm_map_copy_swap_object()
4102 src_object = src_entry->object.vm_object; in vm_map_copy_swap_object()
4117 dst_entry->object.vm_object = src_object; in vm_map_copy_swap_object()
4172 if ((src_object = src_entry->object.vm_object) != NULL) { in vm_map_copy_entry()
4178 src_object = src_entry->object.vm_object; in vm_map_copy_entry()
4181 dst_entry->object.vm_object = src_object; in vm_map_copy_entry()
4203 fake_entry->object.vm_object = src_object; in vm_map_copy_entry()
4215 dst_entry->object.vm_object = NULL; in vm_map_copy_entry()
4283 vm_object_t object; in vmspace_fork() local
4335 object = old_entry->object.vm_object; in vmspace_fork()
4336 if (object == NULL) { in vmspace_fork()
4338 object = old_entry->object.vm_object; in vmspace_fork()
4345 vm_object_reference(object); in vmspace_fork()
4347 vm_object_shadow(&old_entry->object.vm_object, in vmspace_fork()
4361 vm_object_deallocate(object); in vmspace_fork()
4362 object = old_entry->object.vm_object; in vmspace_fork()
4364 VM_OBJECT_WLOCK(object); in vmspace_fork()
4365 vm_object_clear_flag(object, OBJ_ONEMAPPING); in vmspace_fork()
4367 KASSERT(object->cred == NULL, in vmspace_fork()
4369 object->cred = old_entry->cred; in vmspace_fork()
4370 object->charge = old_entry->end - in vmspace_fork()
4382 object->type == OBJT_VNODE) { in vmspace_fork()
4383 KASSERT(((struct vnode *)object-> in vmspace_fork()
4386 object)); in vmspace_fork()
4387 KASSERT(object->un_pager.vnp. in vmspace_fork()
4390 object)); in vmspace_fork()
4392 VM_OBJECT_WUNLOCK(object); in vmspace_fork()
4405 vm_pager_update_writecount(object, in vmspace_fork()
4439 new_entry->object.vm_object = NULL; in vmspace_fork()
4800 if (cred == NULL && stack_entry->object.vm_object != NULL) in vm_map_growstack()
4801 cred = stack_entry->object.vm_object->cred; in vm_map_growstack()
4805 else if (stack_entry->object.vm_object == NULL || in vm_map_growstack()
4806 vm_object_coalesce(stack_entry->object.vm_object, in vm_map_growstack()
4945 vm_object_t *object, /* OUT */ in vm_map_lookup() argument
4979 *var_map = map = entry->object.sub_map; in vm_map_lookup()
5058 eobject = entry->object.vm_object; in vm_map_lookup()
5059 vm_object_shadow(&entry->object.vm_object, in vm_map_lookup()
5061 if (eobject == entry->object.vm_object) { in vm_map_lookup()
5084 if (entry->object.vm_object == NULL && !map->system_map) { in vm_map_lookup()
5087 entry->object.vm_object = vm_object_allocate_anon(atop(size), in vm_map_lookup()
5099 *object = entry->object.vm_object; in vm_map_lookup()
5116 vm_object_t *object, /* OUT */ in vm_map_lookup_locked() argument
5172 if (entry->object.vm_object == NULL && !map->system_map) in vm_map_lookup_locked()
5180 *object = entry->object.vm_object; in vm_map_lookup_locked()
5330 (void *)entry->object.sub_map, in vm_map_print()
5333 prev->object.sub_map != in vm_map_print()
5334 entry->object.sub_map) { in vm_map_print()
5336 vm_map_print((vm_map_t)entry->object.sub_map); in vm_map_print()
5343 (void *)entry->object.vm_object, in vm_map_print()
5345 if (entry->object.vm_object && entry->object.vm_object->cred) in vm_map_print()
5347 entry->object.vm_object->cred->cr_ruid, in vm_map_print()
5348 (uintmax_t)entry->object.vm_object->charge); in vm_map_print()
5355 prev->object.vm_object != in vm_map_print()
5356 entry->object.vm_object) { in vm_map_print()
5359 entry->object.vm_object, in vm_map_print()
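A recurring pattern in the lines above is that vm_map code dereferences entry->object.vm_object and then takes the object's write lock before touching shared state such as the OBJ_ONEMAPPING flag (see the listed lines 1761-1765 and 4364-4365). Below is a minimal illustrative sketch of that pattern, assuming a FreeBSD 13-era kernel tree. The helper name example_clear_onemapping is hypothetical and does not exist in vm_map.c; the fields, flags, and locking macros are the ones that appear in the listing.

/*
 * Illustrative sketch only, not a function from vm_map.c.
 * Mirrors the pattern at listed lines 1761-1765: an anonymous object
 * that is referenced or shadowed by more than one mapping can no
 * longer be treated as singly mapped, so OBJ_ONEMAPPING is cleared
 * while holding the object's write lock.
 */
#include <sys/param.h>
#include <sys/lock.h>
#include <sys/rwlock.h>

#include <vm/vm.h>
#include <vm/vm_object.h>
#include <vm/vm_map.h>

static void
example_clear_onemapping(vm_map_entry_t entry)
{
	vm_object_t object;

	object = entry->object.vm_object;
	/* Only anonymous (OBJ_ANON) objects carry OBJ_ONEMAPPING. */
	if (object == NULL || (object->flags & OBJ_ANON) == 0)
		return;

	VM_OBJECT_WLOCK(object);
	/*
	 * Once the object is referenced by another mapping or has been
	 * shadowed, the "one mapping" optimization no longer applies.
	 */
	if (object->ref_count > 1 || object->shadow_count != 0)
		vm_object_clear_flag(object, OBJ_ONEMAPPING);
	VM_OBJECT_WUNLOCK(object);
}

The same lock-then-update discipline shows up wherever the listing modifies object->cred or object->charge (for example the listed lines 2410-2415 and 3887-3915): the object write lock is held across the check and the update so that concurrent mappings observe a consistent view.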