Searched refs:atomic_load_int (Results 1 – 25 of 31) sorted by relevance
72 return (atomic_load_int(count)); in refcount_load()
106 old = atomic_load_int(count); in refcount_acquire_checked()
125 old = atomic_load_int(count); in refcount_acquire_if_gt()
184 old = atomic_load_int(count); \
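
The refcount_*() hits above all sample the counter first and then race the update. A minimal sketch of that load-then-fcmpset retry loop, with hypothetical function and variable names (only atomic_load_int() and atomic_fcmpset_int() are real FreeBSD primitives):

/*
 * Hedged sketch: take a reference only if the count is non-zero.
 * atomic_fcmpset_int() refreshes "old" on failure, so each retry
 * compares against the current counter value.
 */
static bool
ref_acquire_if_live(volatile u_int *count)
{
	u_int old;

	old = atomic_load_int(count);
	do {
		if (old == 0)
			return (false);
	} while (!atomic_fcmpset_int(count, &old, old + 1));
	return (true);
}
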
49 return (_BLOCKCOUNT_COUNT(atomic_load_int(&count->__count))); in blockcount_read()
42 #define atomic_load_int(p) (*(volatile u_int *)(p)) macro
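
The definition above makes the semantics plain: atomic_load_int() is a single volatile read, so it prevents tearing and compiler caching but imposes no memory ordering. A minimal sketch of the typical lockless-sample use, with a hypothetical counter name:

static volatile u_int stat_counter;	/* hypothetical shared counter */

static u_int
stat_counter_sample(void)
{

	/* One untorn read; callers needing ordering pair this with fences. */
	return (atomic_load_int(&stat_counter));
}
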
94 return (atomic_load_int(&s->s_wr.seq)); in smr_shared_current()
176 #define atomic_load_int kcsan_atomic_load_int macro
90 #define pte_load(ptep) atomic_load_int(ptep)
376 c_seq = atomic_load_int(&c->c_seq); in smr_poll_cpu()
456 s_rd_seq = atomic_load_int(&s->s_rd_seq); in smr_poll_scan()
367 kstacks_val = atomic_load_int(&tty_info_kstacks); in tty_info()
1564 aseq = atomic_load_int(&pps->ppsinfo.assert_sequence); in pps_fetch()
1565 cseq = atomic_load_int(&pps->ppsinfo.clear_sequence); in pps_fetch()
1566 while (aseq == atomic_load_int(&pps->ppsinfo.assert_sequence) && in pps_fetch()
1567 cseq == atomic_load_int(&pps->ppsinfo.clear_sequence)) { in pps_fetch()
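
The pps_fetch() hits show the event-wait idiom: snapshot the assert/clear sequence numbers, then poll until either one advances. A reduced sketch of the same pattern, assuming a single hypothetical sequence counter and using pause(9) between polls:

static volatile u_int ev_seq;	/* hypothetical; bumped by the capture path */

static void
wait_for_next_event(void)
{
	u_int seq;

	seq = atomic_load_int(&ev_seq);
	/* Sleep one tick at a time until a new event is published. */
	while (atomic_load_int(&ev_seq) == seq)
		pause("evwait", 1);
}
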
359 atomic_load_int(&bf->flag) != 0; in kern_jail()
695 atomic_load_int(&bf->flag) != 0; in kern_jail_set()
2216 atomic_load_int(&bf->flag) != 0; in kern_jail_get()
4095 atomic_load_int(&bf->flag) != 0; in prison_add_allow()
4122 if (atomic_load_int(&bf->flag) == 0) in prison_add_allow()
4432 atomic_load_int(&bf->flag) != 0; in db_show_prison()
1141 while (atomic_load_int(&pcpu->influx)) in rms_wait_func()
3446 type = atomic_load_int(&type); in vn_fullpath_hardlink()
3669 lookup_flag = atomic_load_int(&cache_fast_lookup); in cache_fast_lookup_enabled_recalc()
3682 old = atomic_load_int(&cache_fast_lookup); in syscal_vfs_cache_fast_lookup()
3684 if (error == 0 && req->newptr && old != atomic_load_int(&cache_fast_lookup)) in syscal_vfs_cache_fast_lookup()
281 while (atomic_load_int(&sfio->nios) != 1) in sendfile_iowait()
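
The rms_wait_func() and sendfile_iowait() hits above are drain loops: spin until another context clears a flag or drops an in-flight count. A minimal sketch, assuming a hypothetical in-flight counter; cpu_spinwait() is the standard CPU pause hint for such loops:

static volatile u_int io_in_flight;	/* hypothetical counter */

static void
drain_io(void)
{

	while (atomic_load_int(&io_in_flight) != 0)
		cpu_spinwait();
}
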
1263 (atomic_load_int(&ie->ie_hflags) & IH_NET) != 0; in ithread_loop()
694 cticks = atomic_load_int(&ticks); in thread_reap_callout_cb()
2488 if (atomic_load_int(&job->nbio) == 0) { in aio_biowakeup()
2489 if (atomic_load_int(&job->error)) in aio_biowakeup()
249 while (!atomic_load_int(&aps_ready)) in init_secondary()
483 naps = atomic_load_int(&aps_started); in start_cpu()
510 while (atomic_load_int(&aps_started) < naps + 1) in start_cpu()
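
The init_secondary()/start_cpu() hits form the AP bring-up handshake: the BSP samples aps_started, kicks one AP, and spins until the counter advances, while each AP spins on aps_ready before continuing. A sketch of the BSP side, reusing the variable name from the hits but with illustrative surrounding code:

static volatile u_int aps_started;	/* incremented by each AP */

static void
wait_for_one_ap(void)
{
	u_int naps;

	naps = atomic_load_int(&aps_started);
	/* ... send the startup IPI to the AP here (illustrative) ... */
	while (atomic_load_int(&aps_started) < naps + 1)
		cpu_spinwait();
}
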
137 cpuid = atomic_load_int(&td->td_oncpu); in stack_save_td()
239 KASSERT(atomic_load_int(state) == STATE_SLEEPING, in acpi_cpu_idle_mwait()
250 if (atomic_load_int(state) == STATE_MWAIT) in acpi_cpu_idle_mwait()
564 if (atomic_load_int(state) == STATE_MWAIT) in cpu_idle_mwait()
654 switch (atomic_load_int(state)) { in cpu_idle_wakeup()
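
The cpu_idle_mwait()/cpu_idle_wakeup() hits check a per-CPU idle-state word before deciding how to wake a CPU: if that CPU armed MWAIT, a plain store to the monitored word suffices and no IPI is needed. A hedged sketch of that check; the STATE_* values and the function are illustrative:

#define	STATE_RUNNING	0	/* illustrative state values */
#define	STATE_MWAIT	1

static void
idle_wakeup_sketch(volatile u_int *state)
{

	if (atomic_load_int(state) == STATE_MWAIT)
		atomic_store_int(state, STATE_RUNNING);	/* store wakes the monitor */
}
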
1490 if (atomic_load_int(&mb->stop_state) == in cpustop_handler()
480 else if (atomic_load_int(&frag6_nfrags) >= (u_int)ip6_maxfrags) in frag6_input()
535 atomic_load_int(&V_frag6_nfragpackets) >= in frag6_input()
935 atomic_load_int(&V_frag6_nfragpackets) > in frag6_slowtimo()
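
The frag6_input()/frag6_slowtimo() hits (and the ip_reass() hit below) read global fragment counters without a lock to enforce reassembly limits; an occasionally stale read only makes the limit slightly soft. A minimal sketch with hypothetical names:

static volatile u_int nfrags;	/* hypothetical global fragment count */

static bool
frag_limit_reached(u_int maxfrags)
{

	return (atomic_load_int(&nfrags) >= maxfrags);
}
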
732 while (atomic_load_int(p_cpudone) != generation) in smp_targeted_tlb_shootdown()
1105 if (atomic_load_int(&scoreboard[initiator_cpu_id]) == 0) in invlop_handler()
705 #define vm_page_busy_fetch(m) atomic_load_int(&(m)->busy_lock)
2048 last = atomic_load_int(&lowmem_ticks); in vm_pageout_lowmem()
205 (tmpmax >= 0 && atomic_load_int(&nfrags) >= (u_int)tmpmax)) { in ip_reass()