Searched refs:ATOMIC_ACQUIRE (Results 1 – 12 of 12) sorted by relevance
/f-stack/app/redis-5.0.5/deps/jemalloc/src/

extent_dss.c
    58  ret = (dss_prec_t)atomic_load_u(&dss_prec_default, ATOMIC_ACQUIRE);  in extent_dss_prec_get()
   132  if (!atomic_load_b(&dss_exhausted, ATOMIC_ACQUIRE)) {  in extent_alloc_dss()
   241  ATOMIC_ACQUIRE));  in extent_in_dss()
   255  max = atomic_load_p(&dss_max, ATOMIC_ACQUIRE);  in extent_dss_mergeable()

log.c
    55  if (!atomic_load_b(&log_init_done, ATOMIC_ACQUIRE)) {  in log_var_update_state()

rtree.c
   173  ATOMIC_ACQUIRE);  in rtree_child_node_tryread()
   202  ATOMIC_ACQUIRE);  in rtree_child_leaf_tryread()

base.c
   410  ATOMIC_ACQUIRE);  in base_extent_hooks_get()

arena.c
  1674  return (dss_prec_t)atomic_load_u(&arena->dss_prec, ATOMIC_ACQUIRE);  in arena_dss_prec_get()

jemalloc.c
   307  return atomic_load_u(&narenas_total, ATOMIC_ACQUIRE);  in narenas_total_get()
/f-stack/app/redis-5.0.5/deps/jemalloc/include/jemalloc/internal/

background_thread_inlines.h
    24  assert(atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE) ==  in background_thread_wakeup_time_get()
    40  return atomic_load_b(&info->indefinite_sleep, ATOMIC_ACQUIRE);  in background_thread_indefinite_sleep()

atomic.h
    42  #define ATOMIC_ACQUIRE atomic_memory_order_acquire  (macro definition)

rtree.h
   176  ? ATOMIC_RELAXED : ATOMIC_ACQUIRE);  in rtree_leaf_elm_bits_read()
   218  ? ATOMIC_RELAXED : ATOMIC_ACQUIRE);  in rtree_leaf_elm_extent_read()
   231  : ATOMIC_ACQUIRE);  in rtree_leaf_elm_szind_read()
   243  ATOMIC_ACQUIRE);  in rtree_leaf_elm_slab_read()

jemalloc_internal_inlines_a.h
    88  ret = (arena_t *)atomic_load_p(&arenas[ind], ATOMIC_ACQUIRE);  in arena_get()

extent_inlines.h
    49  return (arena_t *)atomic_load_p(&arenas[arena_ind], ATOMIC_ACQUIRE);  in extent_arena_get()
   176  ATOMIC_ACQUIRE);  in extent_prof_tctx_get()
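Note: the atomic.h hit above shows that ATOMIC_ACQUIRE is simply an alias for jemalloc's atomic_memory_order_acquire. Below is a minimal sketch, assuming jemalloc's C11 backend, of what an acquire load such as the narenas_total_get() hit boils down to; it uses plain C11 stdatomic rather than jemalloc's generated atomic_load_u() wrapper.

/* Sketch only: shows the C11 semantics behind ATOMIC_ACQUIRE, not
 * jemalloc's actual generated atomic_load_u() wrapper. */
#include <stdatomic.h>

static atomic_uint narenas_total;  /* stand-in for the counter in jemalloc.c */

static unsigned
narenas_total_get_sketch(void) {
	/* Acquire load: no later read or write in this thread may be
	 * reordered before it, and it synchronizes-with the release store
	 * that last published the value. */
	return atomic_load_explicit(&narenas_total, memory_order_acquire);
}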
/f-stack/app/redis-5.0.5/deps/jemalloc/test/unit/

retained.c
    75  while ((cur_epoch = atomic_load_u(&epoch, ATOMIC_ACQUIRE)) !=  in thd_start()
   121  while (atomic_load_u(&nfinished, ATOMIC_ACQUIRE) < nthreads) {  in TEST_BEGIN()
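Note: the retained.c hits show the typical pairing for these acquire loads: one thread publishes state with a release store, and other threads spin on an acquire load before reading the data it guards (the log_init_done check in log.c follows the same shape). Below is a minimal sketch of that publish/wait pattern in plain C11, with illustrative names rather than jemalloc's.

/* Sketch of the release-store / acquire-load handoff; names are
 * illustrative, not jemalloc's. */
#include <stdatomic.h>
#include <stdbool.h>

static int shared_payload;     /* data prepared by the writer           */
static atomic_bool init_done;  /* flag, in the spirit of log_init_done  */

static void
writer(void) {
	shared_payload = 42;
	/* Release store: everything written above becomes visible to any
	 * thread that later observes init_done == true via an acquire load. */
	atomic_store_explicit(&init_done, true, memory_order_release);
}

static int
reader(void) {
	/* Acquire load in a spin loop, like the epoch/nfinished waits in
	 * retained.c: once the flag reads true, shared_payload is visible. */
	while (!atomic_load_explicit(&init_done, memory_order_acquire)) {
		/* busy-wait */
	}
	return shared_payload;
}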