Searched refs:atomic_load_long (Results 1 – 12 of 12) sorted by relevance
247 old = atomic_load_long(p); in atomic_testandset_long()
265 old = atomic_load_long(p); in atomic_testandclear_long()
43 #define atomic_load_long(p) (*(volatile u_long *)(p)) macro
204 #define atomic_load_long kcsan_atomic_load_long macro
713 return (atomic_load_long(&fp->f_offset)); in foffset_lock()
721 return (atomic_load_long(&fp->f_offset)); in foffset_lock()
744 res = atomic_load_long(&fp->f_offset); in foffset_lock()
1437 rfreevnodes = atomic_load_long(&freevnodes); in vnlru_under_unlocked()
1471 rnumvnodes = atomic_load_long(&numvnodes); in vnlru_proc()
1485 rnumvnodes = atomic_load_long(&numvnodes); in vnlru_proc()
1656 rnumvnodes = atomic_load_long(&numvnodes); in vn_alloc_hard()
1684 if (atomic_load_long(&numvnodes) + 1 > desiredvnodes && in vn_alloc_hard()
664 lnumcache = atomic_load_long(&numcache); in cache_alloc()
1427 lnumneg = atomic_load_long(&numneg); in cache_neg_evict_cond()
2492 aio_complete(job, atomic_load_long(&job->nbytes), 0); in aio_biowakeup()
1279 if (atomic_load_long(&pmap->pm_eptgen) == vmx->eptgen[curcpu]) { in vmx_invvpid()
2955 eptgen = atomic_load_long(&pmap->pm_eptgen); in vmx_pmap_activate()
629 if (*m_gen > atomic_load_long(invl_gen)) in pmap_delayed_invl_wait_block()
893 mygen = atomic_load_long(&invl_gen->gen); in pmap_delayed_invl_finish_u_crit()
1087 while (*m_gen > atomic_load_long(&pmap_invl_gen_head.gen)) { in pmap_delayed_invl_wait_u()
1119 atomic_load_long(&pmap_invl_gen_head.gen)) { in pmap_delayed_invl_wait_u()
1856 eptgen = atomic_load_long(&pmap->pm_eptgen); in svm_pmap_activate()
5067 return (atomic_load_long(&uma_kmem_total));
5901 prev_addr = atomic_load_long(&pmap_trm_arena_last); in pmap_trm_import()