/f-stack/app/redis-5.0.5/deps/jemalloc/src/
tcache.c
    127   if (arena_prof_accum(tsd_tsdn(tsd), arena,    in tcache_bin_flush_small()
    129   prof_idump(tsd_tsdn(tsd));    in tcache_bin_flush_small()
    134   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in tcache_bin_flush_small()
    149   arena_dalloc_bin_junked_locked(tsd_tsdn(tsd),    in tcache_bin_flush_small()
    163   malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);    in tcache_bin_flush_small()
    173   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in tcache_bin_flush_small()
    177   malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);    in tcache_bin_flush_small()
    248   large_dalloc_finish(tsd_tsdn(tsd), extent);    in tcache_bin_flush_large()
    262   prof_idump(tsd_tsdn(tsd));    in tcache_bin_flush_large()
    416   arena = arena_get(tsd_tsdn(tsd), 0, false);    in tsd_tcache_data_init()
    [all …]
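The tcache.c hits show the idiom that repeats throughout this listing: the caller holds a non-NULL per-thread tsd_t *, converts it once via tsd_tsdn(tsd) into the nullable tsdn_t * form, and passes that to every locking and accounting helper. The sketch below is a minimal, self-contained illustration of that shape only; my_tsd_t, my_tsdn_t, bin_lock() and bin_unlock() are hypothetical stand-ins, not jemalloc's types or API.

```c
/*
 * Illustration only: these types and helpers are stand-ins, not jemalloc's.
 * The point is the pattern in the hits above: widen a never-NULL per-thread
 * handle into a nullable one once, then hand that to lock/unlock helpers.
 */
#include <pthread.h>
#include <stdio.h>

typedef struct { int thread_id; } my_tsd_t;    /* per-thread state, never NULL here */
typedef struct { my_tsd_t tsd; } my_tsdn_t;    /* nullable view of the same data */

static my_tsdn_t *
my_tsd_tsdn(my_tsd_t *tsd) {                   /* analogous role to tsd_tsdn(tsd) */
	return (my_tsdn_t *)tsd;
}

static void
bin_lock(my_tsdn_t *tsdn, pthread_mutex_t *m) {
	(void)tsdn;                            /* real code would record the owner */
	pthread_mutex_lock(m);
}

static void
bin_unlock(my_tsdn_t *tsdn, pthread_mutex_t *m) {
	(void)tsdn;
	pthread_mutex_unlock(m);
}

int
main(void) {
	my_tsd_t tsd = { .thread_id = 1 };
	pthread_mutex_t bin_mtx = PTHREAD_MUTEX_INITIALIZER;

	/* Mirrors the flush path: lock the bin, do work, unlock the bin. */
	bin_lock(my_tsd_tsdn(&tsd), &bin_mtx);
	printf("flushing cached objects for thread %d\n", tsd.thread_id);
	bin_unlock(my_tsd_tsdn(&tsd), &bin_mtx);
	return 0;
}
```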
|
jemalloc.c
    556    ret = arena_get(tsd_tsdn(tsd),    in arena_choose_hard()
    568    malloc_mutex_unlock(tsd_tsdn(tsd),    in arena_choose_hard()
    585    tsd_tsdn(tsd), choose[j]);    in arena_choose_hard()
    2340   tsdn = tsd_tsdn(tsd);    in je_realloc()
    3213   ctl_prefork(tsd_tsdn(tsd));    in jemalloc_prefork()
    3214   tcache_prefork(tsd_tsdn(tsd));    in jemalloc_prefork()
    3219   prof_prefork0(tsd_tsdn(tsd));    in jemalloc_prefork()
    3258   prof_prefork1(tsd_tsdn(tsd));    in jemalloc_prefork()
    3296   ctl_postfork_parent(tsd_tsdn(tsd));    in jemalloc_postfork_parent()
    3317   prof_postfork_child(tsd_tsdn(tsd));    in jemalloc_postfork_child()
    [all …]
|
prof.c
    289    malloc_mutex_lock(tsd_tsdn(tsd), &bt2gctx_mtx);    in prof_enter()
    310    prof_idump(tsd_tsdn(tsd));    in prof_leave()
    313    prof_gdump(tsd_tsdn(tsd));    in prof_leave()
    591    malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);    in prof_gctx_try_destroy()
    661    malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);    in prof_tctx_destroy()
    702    malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock);    in prof_tctx_destroy()
    800    malloc_mutex_lock(tsd_tsdn(tsd), tdata->lock);    in prof_lookup()
    848    malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);    in prof_lookup()
    1231   malloc_mutex_lock(tsd_tsdn(tsd), gctx->lock);    in prof_gctx_finish()
    1240   (void *)tsd_tsdn(tsd));    in prof_gctx_finish()
    [all …]
|
background_thread.c
    315   malloc_mutex_assert_owner(tsd_tsdn(tsd),
    318   malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
    323   malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
    333   malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
    393   tsdn_t *tsdn = tsd_tsdn(tsd);
    454   background_work_sleep_once(tsd_tsdn(tsd),
    488   malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
    548   malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
    561   malloc_mutex_lock(tsd_tsdn(tsd), &t0->mtx);
    564   malloc_mutex_unlock(tsd_tsdn(tsd), &t0->mtx);
    [all …]
|
ctl.c
    1001   tsdn_t *tsdn = tsd_tsdn(tsd);    in ctl_init()
    1502   malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx);    in CTL_RO_NL_GEN()
    1505   ctl_refresh(tsd_tsdn(tsd));    in CTL_RO_NL_GEN()
    1524   background_thread_ctl_init(tsd_tsdn(tsd));    in background_thread_ctl()
    1582   background_thread_ctl_init(tsd_tsdn(tsd));    in max_background_threads_ctl()
    1738   tcache_arena_reassociate(tsd_tsdn(tsd),    in CTL_RO_CONFIG_GEN()
    1923   tsdn_t *tsdn = tsd_tsdn(tsd);    in arena_i_initialized_ctl()
    2489   extent = iealloc(tsd_tsdn(tsd), ptr);    in arenas_lookup_ctl()
    2552   oldval = prof_active_get(tsd_tsdn(tsd));    in prof_active_ctl()
    2601   oldval = prof_gdump_get(tsd_tsdn(tsd));    in prof_gdump_ctl()
    [all …]
|
ckh.c
    282   tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE,    in ckh_grow()
    295   idalloctm(tsd_tsdn(tsd), tab, NULL, NULL, true, true);    in ckh_grow()
    300   idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true);    in ckh_grow()
    326   tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL,    in ckh_shrink()
    342   idalloctm(tsd_tsdn(tsd), tab, NULL, NULL, true, true);    in ckh_shrink()
    350   idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true);    in ckh_shrink()
    403   ckh->tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true,    in ckh_new()
    431   idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true);    in ckh_delete()
|
arena.c
    990    assert(usize == isalloc(tsd_tsdn(tsd), ptr));    in arena_reset()
    996    large_dalloc(tsd_tsdn(tsd), extent);    in arena_reset()
    1005   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in arena_reset()
    1010   arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);    in arena_reset()
    1011   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in arena_reset()
    1016   arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);    in arena_reset()
    1017   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in arena_reset()
    1023   arena_slab_dalloc(tsd_tsdn(tsd), arena, slab);    in arena_reset()
    1024   malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);    in arena_reset()
    1070   arena_destroy_retained(tsd_tsdn(tsd), arena);    in arena_destroy()
    [all …]
|
extent.c
    206    malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);    in extent_hooks_set()
    210    malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);    in extent_hooks_set()
    1200   if (arena == arena_get(tsd_tsdn(tsd), 0, false)) {    in extent_hook_pre_reentrancy()
|
/f-stack/app/redis-5.0.5/deps/jemalloc/include/jemalloc/internal/
jemalloc_internal_inlines_b.h
    17   return arena_get(tsd_tsdn(tsd), 0, true);    in arena_choose_impl()
    29   arena_get(tsd_tsdn(tsd), 0, false));    in arena_choose_impl()
    31   tcache_arena_reassociate(tsd_tsdn(tsd),    in arena_choose_impl()
    35   tcache_arena_associate(tsd_tsdn(tsd), tcache,    in arena_choose_impl()
    49   tsd_tsdn(tsd))) {    in arena_choose_impl()
    55   ret->last_thd = tsd_tsdn(tsd);    in arena_choose_impl()
|
prof_inlines_b.h
    146   assert(usize == isalloc(tsd_tsdn(tsd), ptr));    in prof_realloc()
    165   prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx);    in prof_realloc()
    167   prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL,    in prof_realloc()
    176   prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx);    in prof_realloc()
    178   assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr, NULL) ==    in prof_realloc()
    196   prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);    in prof_free()
    199   assert(usize == isalloc(tsd_tsdn(tsd), ptr));    in prof_free()
|
tcache_inlines.h
    60    ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,    in tcache_alloc_small()
    74    assert(tcache_salloc(tsd_tsdn(tsd), ret) == usize);    in tcache_alloc_small()
    124   ret = large_malloc(tsd_tsdn(tsd), arena, sz_s2u(size), zero);    in tcache_alloc_large()
    169   assert(tcache_salloc(tsd_tsdn(tsd), ptr) <= SMALL_MAXCLASS);    in tcache_dalloc_small()
    194   assert(tcache_salloc(tsd_tsdn(tsd), ptr) > SMALL_MAXCLASS);    in tcache_dalloc_large()
    195   assert(tcache_salloc(tsd_tsdn(tsd), ptr) <= tcache_maxclass);    in tcache_dalloc_large()
|
arena_inlines_a.h
    44   arena_t *newarena = arena_get(tsd_tsdn(tsd), newind, true);    in percpu_arena_update()
    51   tcache_arena_reassociate(tsd_tsdn(tsd), tcache,    in percpu_arena_update()
|
jemalloc_internal_inlines_c.h
    62    return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false,    in ialloc()
    94    return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,    in ipalloc()
    123   idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true);    in idalloc()
    197   return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero,    in iralloc()
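These hits show the tsd-flavoured convenience wrappers (ialloc(), ipalloc(), idalloc(), iralloc()) deriving both a tsdn and a tcache from the caller's tsd before forwarding to the lower-level routines. A minimal sketch of that wrapper pattern follows; the types and functions in it are simplified stand-ins introduced for illustration, not the definitions from this tree.

```c
/*
 * Sketch of the forwarding pattern only. tsd_t, tsdn_t, tcache_get() and
 * the alloc helpers below are simplified stand-ins, not jemalloc's code.
 */
#include <stdlib.h>

typedef struct { int id; } tsd_t;
typedef struct { tsd_t tsd; } tsdn_t;
typedef struct { int unused; } tcache_t;

static tsdn_t *
tsd_tsdn(tsd_t *tsd) {
	return (tsdn_t *)tsd;
}

static tcache_t *
tcache_get(tsd_t *tsd) {
	(void)tsd;
	static tcache_t tc;
	return &tc;
}

/* tsdn-flavoured worker: written so it could also accept tsdn == NULL. */
static void *
alloc_tm(tsdn_t *tsdn, size_t size, tcache_t *tcache) {
	(void)tsdn;
	(void)tcache;
	return malloc(size);
}

/* tsd-flavoured convenience wrapper, mirroring the shape of ialloc(). */
static void *
alloc_from_tsd(tsd_t *tsd, size_t size) {
	return alloc_tm(tsd_tsdn(tsd), size, tcache_get(tsd));
}

int
main(void) {
	tsd_t tsd = { 0 };
	void *p = alloc_from_tsd(&tsd, 64);
	free(p);
	return 0;
}
```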
|
tsd.h
    137   tsd_tsdn(tsd_t *tsd) {    in tsd_tsdn() (definition)
    305   return tsd_tsdn(tsd_fetch_impl(false, false));    in tsdn_fetch()
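Line 137 is the definition site of tsd_tsdn() itself, and the tsdn_fetch() hit shows the usual way callers obtain a tsdn_t *. The signature (tsd_t * in, tsdn_t * out) is visible in the hits; the sketch below additionally assumes, as in upstream jemalloc 5.x, that tsdn_t is a thin wrapper over tsd_t whose NULL value means "no thread state available" and that the conversion is effectively a cast. The types here are simplified stand-ins, not the real ones, and the bundled copy should be checked before relying on this.

```c
/*
 * Sketch of the tsd_t / tsdn_t relationship the tsd.h hits suggest.
 * Assumption: tsdn_t wraps tsd_t and tsd_tsdn() is a plain cast, as in
 * upstream jemalloc 5.x. These definitions are stand-ins for illustration.
 */
#include <assert.h>
#include <stddef.h>

typedef struct tsd_s { int dummy_state; } tsd_t;   /* per-thread state, never NULL */
typedef struct tsdn_s { tsd_t tsd; } tsdn_t;       /* same layout, but may be NULL */

static inline tsdn_t *
tsd_tsdn(tsd_t *tsd) {                             /* non-null -> nullable: a cast */
	return (tsdn_t *)tsd;
}

static inline tsd_t *
tsdn_tsd(tsdn_t *tsdn) {                           /* nullable -> non-null: caller checks */
	assert(tsdn != NULL);
	return &tsdn->tsd;
}

int
main(void) {
	tsd_t tsd = { 0 };
	tsdn_t *tsdn = tsd_tsdn(&tsd);             /* the idiom in every hit above */
	assert(tsdn_tsd(tsdn) == &tsd);            /* round-trips to the same object */
	return 0;
}
```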
|
jemalloc_internal_inlines_a.h
    151   assert(arena != arena_get(tsd_tsdn(tsd), 0, false));    in pre_reentrancy()
|
/f-stack/app/redis-5.0.5/deps/jemalloc/test/unit/
background_thread.c
    92   malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);    in TEST_BEGIN()
    96   malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);    in TEST_BEGIN()
|
base.c
    33    tsdn_t *tsdn = tsd_tsdn(tsd_fetch());    in TEST_BEGIN()
    75    tsdn_t *tsdn = tsd_tsdn(tsd_fetch());    in TEST_BEGIN()
    120   tsdn_t *tsdn = tsd_tsdn(tsd_fetch());    in TEST_BEGIN()
|