/f-stack/app/redis-5.0.5/deps/jemalloc/include/jemalloc/internal/
prof_inlines_b.h
     53  arena_prof_tctx_set(tsdn, ptr, usize, alloc_ctx, tctx);  in prof_tctx_set()
     84  if (likely(tdata->bytes_until_sample >= usize)) {  in prof_sample_accum_update()
     86  tdata->bytes_until_sample -= usize;  in prof_sample_accum_update()
    107  assert(usize == sz_s2u(usize));  in prof_alloc_prep()
    126  assert(usize == isalloc(tsdn, ptr));  in prof_malloc()
    129  prof_malloc_sample_object(tsdn, ptr, usize, tctx);  in prof_malloc()
    131  prof_tctx_set(tsdn, ptr, usize, alloc_ctx,  in prof_malloc()
    146  assert(usize == isalloc(tsd_tsdn(tsd), ptr));  in prof_realloc()
    167  prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL,  in prof_realloc()
    199  assert(usize == isalloc(tsd_tsdn(tsd), ptr));  in prof_free()
    [all …]
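The prof_sample_accum_update hits above show jemalloc's byte-countdown sampling: each thread keeps a bytes_until_sample counter that every allocation decrements by its usize, and only an allocation that exhausts the counter gets sampled. A minimal standalone sketch of that idiom (sampler_t, should_sample, and the fixed rearm interval are illustrative, not jemalloc's actual API):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical per-thread sampler state; jemalloc keeps the real
     * counter in the thread's tdata. */
    typedef struct {
        size_t bytes_until_sample; /* bytes left before the next sample */
        size_t sample_interval;    /* rearm value after each sample */
    } sampler_t;

    /* Return true when an allocation of usize bytes should be sampled. */
    static bool
    should_sample(sampler_t *s, size_t usize) {
        if (s->bytes_until_sample >= usize) {
            /* Fast path, as in prof_sample_accum_update. */
            s->bytes_until_sample -= usize;
            return false;
        }
        /* Counter exhausted: sample and rearm. jemalloc rearms with a
         * randomized interval; a fixed one keeps the sketch short. */
        s->bytes_until_sample = s->sample_interval;
        return true;
    }

    int
    main(void) {
        sampler_t s = {4096, 4096};
        for (int i = 0; i < 10; i++) {
            if (should_sample(&s, 1000)) {
                printf("allocation %d sampled\n", i);
            }
        }
        return 0;
    }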
sz.h
    120  return usize;  in sz_psz2u()
    195  return usize;  in sz_index2size_compute()
    232  return usize;  in sz_s2u_compute()
    263  size_t usize;  in sz_sa2u() local
    284  if (usize < LARGE_MINCLASS) {  in sz_sa2u()
    285  return usize;  in sz_sa2u()
    297  usize = LARGE_MINCLASS;  in sz_sa2u()
    299  usize = sz_s2u(size);  in sz_sa2u()
    300  if (usize < size) {  in sz_sa2u()
    310  if (usize + sz_large_pad + PAGE_CEILING(alignment) - PAGE < usize) {  in sz_sa2u()
    [all …]
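The last sz_sa2u hit (line 310) is an unsigned-wraparound overflow check: if adding the large padding and the page-ceilinged alignment produces a sum smaller than usize, the addition wrapped and the request must fail. A hedged sketch of the idiom (the PAGE value and the helper names here are assumptions for illustration):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define PAGE ((size_t)4096) /* illustrative page size */

    /* Round s up to a page multiple; mirrors PAGE_CEILING for a
     * power-of-two page size. */
    static size_t
    page_ceiling(size_t s) {
        return (s + PAGE - 1) & ~(PAGE - 1);
    }

    /* Unsigned addition wraps, so a sum smaller than one of its
     * operands proves overflow -- the same test sz_sa2u uses. */
    static bool
    request_overflows(size_t usize, size_t pad, size_t alignment) {
        return usize + pad + page_ceiling(alignment) - PAGE < usize;
    }

    int
    main(void) {
        assert(!request_overflows(4096, 0, 8192));
        assert(request_overflows(SIZE_MAX - 100, 0, 8192));
        return 0;
    }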
jemalloc_internal_inlines_c.h
     71  assert(usize != 0);  in ipallocztm()
     72  assert(usize == sz_sa2u(usize, alignment));  in ipallocztm()
     93  ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) {  in ipalloc() argument
     94  return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero,  in ipalloc()
    141  size_t usize, copysize;  in iralloct_realign() local
    143  usize = sz_sa2u(size + extra, alignment);  in iralloct_realign()
    144  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in iralloct_realign()
    147  p = ipalloct(tsdn, usize, alignment, zero, tcache, arena);  in iralloct_realign()
    153  usize = sz_sa2u(size, alignment);  in iralloct_realign()
    154  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in iralloct_realign()
    [all …]
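iralloct_realign is the fallback when an allocation cannot satisfy a new alignment in place: compute the new usable size, allocate a fresh aligned block, copy the smaller of the two sizes, and free the original. A simplified portable sketch using C11 aligned_alloc rather than jemalloc's internal ipalloct (realloc_realign is a hypothetical name):

    #include <assert.h>
    #include <stdlib.h>
    #include <string.h>

    /* Reallocate ptr (currently oldsize bytes) to size bytes at a new
     * power-of-two alignment via allocate-copy-free. On failure the
     * original block stays valid, as in iralloct_realign. */
    static void *
    realloc_realign(void *ptr, size_t oldsize, size_t size, size_t alignment) {
        /* aligned_alloc requires size to be a multiple of alignment. */
        size_t asize = (size + alignment - 1) & ~(alignment - 1);
        if (asize < size) {
            return NULL; /* rounding overflowed */
        }
        void *p = aligned_alloc(alignment, asize);
        if (p == NULL) {
            return NULL;
        }
        /* Copy only the bytes present in both blocks. */
        memcpy(p, ptr, oldsize < size ? oldsize : size);
        free(ptr);
        return p;
    }

    int
    main(void) {
        void *p = malloc(32);
        assert(p != NULL);
        memset(p, 0x7f, 32);
        p = realloc_realign(p, 32, 100, 64);
        assert(p != NULL && ((unsigned char *)p)[0] == 0x7f);
        free(p);
        return 0;
    }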
tcache_inlines.h
     47  size_t usize JEMALLOC_CC_SILENCE_INIT(0);  in tcache_alloc_small()
     73  usize = sz_index2size(binind);  in tcache_alloc_small()
     83  memset(ret, 0, usize);  in tcache_alloc_small()
     90  memset(ret, 0, usize);  in tcache_alloc_small()
     97  tcache->prof_accumbytes += usize;  in tcache_alloc_small()
    134  usize = sz_index2size(binind);  in tcache_alloc_large()
    135  assert(usize <= tcache_maxclass);  in tcache_alloc_large()
    142  usize);  in tcache_alloc_large()
    144  memset(ret, 0, usize);  in tcache_alloc_large()
    148  memset(ret, 0, usize);  in tcache_alloc_large()
    [all …]
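tcache_alloc_small and tcache_alloc_large pop a pointer from a per-thread bin and, when the caller requested zeroed memory, memset the full usable size (usize, from the size class) rather than the requested size. A loose, hypothetical miniature of that fast path (mini_bin_t and mini_bin_alloc are invented for illustration):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <string.h>

    /* Invented miniature of a thread-cache bin: a LIFO stack of
     * equally sized free objects. */
    typedef struct {
        void **avail;     /* stack of cached object pointers */
        unsigned ncached; /* number of entries on the stack */
        size_t usize;     /* usable size of every object in the bin */
    } mini_bin_t;

    static void *
    mini_bin_alloc(mini_bin_t *bin, bool zero) {
        if (bin->ncached == 0) {
            return NULL; /* jemalloc would refill from the arena here */
        }
        void *ret = bin->avail[--bin->ncached];
        if (zero) {
            /* Zero the full usable size, as the memset(ret, 0, usize)
             * hits above do, not just the requested size. */
            memset(ret, 0, bin->usize);
        }
        return ret;
    }

    int
    main(void) {
        char obj[64] = {1};
        void *slots[1] = {obj};
        mini_bin_t bin = {slots, 1, sizeof(obj)};
        char *p = mini_bin_alloc(&bin, true);
        assert(p == obj && p[0] == 0);
        assert(mini_bin_alloc(&bin, false) == NULL);
        return 0;
    }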
prof_inlines_a.h
     45  prof_accum_cancel(tsdn_t *tsdn, prof_accum_t *prof_accum, size_t usize) {  in prof_accum_cancel() argument
     58  a1 = (a0 >= LARGE_MINCLASS - usize) ? a0 - (LARGE_MINCLASS -  in prof_accum_cancel()
     59  usize) : 0;  in prof_accum_cancel()
     65  a1 = (a0 >= LARGE_MINCLASS - usize) ? a0 - (LARGE_MINCLASS - usize) :  in prof_accum_cancel()
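Both prof_accum_cancel hits are the same saturating subtraction: remove (LARGE_MINCLASS - usize) from the accumulator, clamping at zero so the unsigned value cannot wrap. The idiom in isolation:

    #include <assert.h>
    #include <stddef.h>

    /* Subtract delta from acc, clamping at zero instead of letting the
     * unsigned value wrap, as prof_accum_cancel does with
     * LARGE_MINCLASS - usize. */
    static size_t
    saturating_sub(size_t acc, size_t delta) {
        return (acc >= delta) ? acc - delta : 0;
    }

    int
    main(void) {
        assert(saturating_sub(10, 3) == 7);
        assert(saturating_sub(3, 10) == 0);
        return 0;
    }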
large_externs.h
      4  void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
      5  void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
      9  void *large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
arena_externs.h
     32  size_t usize, size_t alignment, bool *zero);
     57  void *arena_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
     59  void arena_prof_promote(tsdn_t *tsdn, const void *ptr, size_t usize);
prof_externs.h
     46  void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
     48  void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
arena_inlines_b.h
     31  arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,  in arena_prof_tctx_set() argument
/f-stack/app/redis-5.0.5/deps/jemalloc/src/
large.c
     15  assert(usize == sz_s2u(usize));  in large_malloc()
     30  ausize = sz_sa2u(usize, alignment);  in large_palloc()
    102  assert(oldusize > usize);  in large_ralloc_no_move_shrink()
    111  &extent_hooks, extent, usize + sz_large_pad,  in large_ralloc_no_move_shrink()
    136  size_t trailsize = usize - oldusize;  in large_ralloc_no_move_expand()
    181  szind_t szind = sz_size2index(usize);  in large_ralloc_no_move_expand()
    210  JEMALLOC_ALLOC_JUNK, usize - oldusize);  in large_ralloc_no_move_expand()
    267  return large_malloc(tsdn, arena, usize, zero);  in large_ralloc_move_helper()
    278  assert(usize > 0 && usize <= LARGE_MAXCLASS);  in large_ralloc()
    283  if (!large_ralloc_no_move(tsdn, extent, usize, usize, zero)) {  in large_ralloc()
    [all …]
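The large_ralloc hit at line 283 shows the usual two-step resize: try large_ralloc_no_move (shrink or grow the backing extent in place) first, and only on failure take the move path that allocates, copies, and frees. A hedged outline of that control flow (resize_in_place and ralloc_sketch are illustrative stand-ins, not jemalloc functions):

    #include <assert.h>
    #include <stdlib.h>
    #include <string.h>

    /* Illustrative stand-in: a real allocator would try to grow or
     * shrink the backing extent here (large_ralloc_no_move). */
    static int
    resize_in_place(void *ptr, size_t oldusize, size_t usize) {
        (void)ptr; (void)oldusize; (void)usize;
        return 0; /* pretend in-place resize always fails */
    }

    /* Two-step resize in the spirit of large_ralloc: no-move first,
     * then the allocate-copy-free move path. */
    static void *
    ralloc_sketch(void *ptr, size_t oldusize, size_t usize) {
        if (resize_in_place(ptr, oldusize, usize)) {
            return ptr; /* fast path: pointer unchanged */
        }
        void *p = malloc(usize);
        if (p == NULL) {
            return NULL; /* original block left intact */
        }
        memcpy(p, ptr, oldusize < usize ? oldusize : usize);
        free(ptr);
        return p;
    }

    int
    main(void) {
        void *p = malloc(64);
        assert(p != NULL);
        p = ralloc_sketch(p, 64, 256);
        free(p);
        return 0;
    }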
jemalloc.c
   1838  assert(usize > 0 && usize <= LARGE_MAXCLASS);  in imalloc_body()
   1842  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in imalloc_body()
   1880  sopts, dopts, tsd, usize, usize, ind);  in imalloc_body()
   2210  size_t usize;  in ifree() local
   2331  ret = unlikely(usize == 0 || usize > LARGE_MAXCLASS) ?  in je_realloc()
   2664  size_t usize;  in je_rallocx() local
   2711  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in je_rallocx()
   2755  size_t usize;  in ixallocx_helper() local
   2762  return usize;  in ixallocx_helper()
   2768  size_t usize;  in ixallocx_prof_sample() local
   [all …]
arena.c
    295  usize = LARGE_MINCLASS;  in arena_large_malloc_stats_update()
    311  usize = LARGE_MINCLASS;  in arena_large_dalloc_stats_update()
    322  size_t usize) {  in arena_large_ralloc_stats_update() argument
    979  size_t usize;  in arena_reset() local
   1320  size_t usize;  in arena_malloc_small() local
   1363  memset(ret, 0, usize);  in arena_malloc_small()
   1396  ret = arena_malloc(tsdn, arena, usize, sz_size2index(usize),  in arena_palloc()
   1621  return arena_malloc(tsdn, arena, usize, sz_size2index(usize),  in arena_ralloc_move_helper()
   1624  usize = sz_sa2u(usize, alignment);  in arena_ralloc_move_helper()
   1625  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in arena_ralloc_move_helper()
   [all …]
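The two stats-update hits (lines 295 and 311) clamp usize up to LARGE_MINCLASS because sampled small allocations are promoted (see arena_prof_promote) to the smallest large size class, and the large-allocation stats must bucket them accordingly. A tiny sketch of the clamp, with an assumed LARGE_MINCLASS value:

    #include <assert.h>
    #include <stddef.h>

    #define LARGE_MINCLASS_EXAMPLE ((size_t)16384) /* assumed value */

    /* Sampled small allocations are promoted to the smallest large
     * size class, so the large stats clamp usize before bucketing. */
    static size_t
    stats_usize(size_t usize) {
        if (usize < LARGE_MINCLASS_EXAMPLE) {
            usize = LARGE_MINCLASS_EXAMPLE;
        }
        return usize;
    }

    int
    main(void) {
        assert(stats_usize(100) == LARGE_MINCLASS_EXAMPLE);
        assert(stats_usize(65536) == 65536);
        return 0;
    }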
ckh.c
    274  size_t usize;  in ckh_grow() local
    277  usize = sz_sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);  in ckh_grow()
    278  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in ckh_grow()
    282  tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE,  in ckh_grow()
    313  size_t usize;  in ckh_shrink() local
    322  usize = sz_sa2u(sizeof(ckhc_t) << lg_curcells, CACHELINE);  in ckh_shrink()
    323  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in ckh_shrink()
    362  size_t mincells, usize;  in ckh_new() local
    398  usize = sz_sa2u(sizeof(ckhc_t) << lg_mincells, CACHELINE);  in ckh_new()
    399  if (unlikely(usize == 0 || usize > LARGE_MAXCLASS)) {  in ckh_new()
    [all …]
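All three ckh.c call sites repeat the guard seen throughout jemalloc.c and large.c: sz_sa2u returns 0 on overflow, so 0 doubles as the error sentinel, and anything above LARGE_MAXCLASS cannot be served. A sketch of the pattern with hypothetical stand-ins for sz_sa2u and the bound:

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    #define LARGE_MAXCLASS_EXAMPLE ((size_t)1 << 62) /* assumed bound */

    /* Hypothetical stand-in for sz_sa2u: usable size for a request,
     * or 0 when the rounding overflows, so 0 doubles as the error
     * sentinel checked at every call site. */
    static size_t
    usable_size_or_zero(size_t size, size_t alignment) {
        size_t usize = (size + alignment - 1) & ~(alignment - 1);
        return (usize < size) ? 0 : usize;
    }

    static bool
    request_ok(size_t size, size_t alignment) {
        size_t usize = usable_size_or_zero(size, alignment);
        /* The guard repeated in ckh_grow, ckh_shrink, and ckh_new. */
        if (usize == 0 || usize > LARGE_MAXCLASS_EXAMPLE) {
            return false;
        }
        return true;
    }

    int
    main(void) {
        assert(request_ok(100, 64));
        assert(!request_ok(SIZE_MAX - 8, 64));
        return 0;
    }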
base.c
    252  size_t usize = ALIGNMENT_CEILING(size, alignment);  in base_block_alloc() local
    264  + usize));  in base_block_alloc()
    424  size_t usize = ALIGNMENT_CEILING(size, alignment);  in base_alloc_impl() local
    425  size_t asize = usize + alignment - QUANTUM;  in base_alloc_impl()
    438  extent = base_extent_alloc(tsdn, base, usize, alignment);  in base_alloc_impl()
    446  ret = base_extent_bump_alloc(base, extent, usize, alignment);  in base_alloc_impl()
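base_block_alloc and base_alloc_impl both begin by rounding the request up with ALIGNMENT_CEILING. For power-of-two alignments that is the classic mask trick; a minimal runnable sketch (the QUANTUM value here is illustrative):

    #include <assert.h>
    #include <stddef.h>

    #define QUANTUM ((size_t)16) /* illustrative; jemalloc's is per-platform */

    /* Round size up to a multiple of alignment, which must be a power
     * of two. Mirrors jemalloc's ALIGNMENT_CEILING macro. */
    static size_t
    alignment_ceiling(size_t size, size_t alignment) {
        assert((alignment & (alignment - 1)) == 0);
        return (size + alignment - 1) & ~(alignment - 1);
    }

    int
    main(void) {
        assert(alignment_ceiling(1, QUANTUM) == 16);
        assert(alignment_ceiling(16, QUANTUM) == 16);
        assert(alignment_ceiling(17, QUANTUM) == 32);
        return 0;
    }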
prof.c
    241  prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,  in prof_malloc_sample_object() argument
    243  prof_tctx_set(tsdn, ptr, usize, NULL, tctx);  in prof_malloc_sample_object()
    247  tctx->cnts.curbytes += usize;  in prof_malloc_sample_object()
    250  tctx->cnts.accumbytes += usize;  in prof_malloc_sample_object()
    257  prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx) {  in prof_free_sampled_object() argument
    260  assert(tctx->cnts.curbytes >= usize);  in prof_free_sampled_object()
    262  tctx->cnts.curbytes -= usize;  in prof_free_sampled_object()
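prof_malloc_sample_object and prof_free_sampled_object keep paired byte counters: curbytes grows by usize when an allocation is sampled and shrinks by the same usize at free, with the assert at line 260 guarding against underflow. A sketch of that bookkeeping, modeled loosely on tctx->cnts (the struct and function names here are illustrative):

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Per-context counters, modeled loosely on tctx->cnts. */
    typedef struct {
        uint64_t curobjs;    /* live sampled objects */
        uint64_t curbytes;   /* live sampled bytes */
        uint64_t accumobjs;  /* lifetime sampled objects */
        uint64_t accumbytes; /* lifetime sampled bytes */
    } prof_cnts_sketch_t;

    static void
    record_sampled_alloc(prof_cnts_sketch_t *c, size_t usize) {
        c->curobjs++;
        c->curbytes += usize;   /* line 247 above */
        c->accumobjs++;
        c->accumbytes += usize; /* line 250 above */
    }

    static void
    record_sampled_free(prof_cnts_sketch_t *c, size_t usize) {
        assert(c->curbytes >= usize); /* invariant from line 260 */
        c->curobjs--;
        c->curbytes -= usize;
    }

    int
    main(void) {
        prof_cnts_sketch_t c = {0, 0, 0, 0};
        record_sampled_alloc(&c, 4096);
        record_sampled_free(&c, 4096);
        assert(c.curbytes == 0 && c.accumbytes == 4096);
        return 0;
    }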
extent.c
    111  size_t usize, size_t pad, size_t alignment, bool slab, szind_t szind,
/f-stack/app/redis-5.0.5/deps/jemalloc/test/unit/
junk.c
     33  large_dalloc_junk_intercept(void *ptr, size_t usize) {  in large_dalloc_junk_intercept() argument
     36  large_dalloc_junk_orig(ptr, usize);  in large_dalloc_junk_intercept()
     37  for (i = 0; i < usize; i++) {  in large_dalloc_junk_intercept()
     40  i, usize);  in large_dalloc_junk_intercept()
     48  large_dalloc_maybe_junk_intercept(void *ptr, size_t usize) {  in large_dalloc_maybe_junk_intercept() argument
     49  large_dalloc_maybe_junk_orig(ptr, usize);  in large_dalloc_maybe_junk_intercept()
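The junk.c intercepts wrap the real large_dalloc_junk and then verify that every byte of the freed region carries jemalloc's deallocation junk pattern (0x5a). A simplified fill-and-check sketch (the JUNK_BYTE macro is named here for illustration):

    #include <assert.h>
    #include <stdbool.h>
    #include <stddef.h>
    #include <string.h>

    #define JUNK_BYTE 0x5a /* jemalloc's free-junk pattern */

    /* Fill freed memory with the junk byte, as large_dalloc_junk does. */
    static void
    junk_fill(void *ptr, size_t usize) {
        memset(ptr, JUNK_BYTE, usize);
    }

    /* Test-style verification in the spirit of
     * large_dalloc_junk_intercept: every byte must carry the pattern. */
    static bool
    junk_check(const void *ptr, size_t usize) {
        const unsigned char *p = ptr;
        for (size_t i = 0; i < usize; i++) {
            if (p[i] != JUNK_BYTE) {
                return false;
            }
        }
        return true;
    }

    int
    main(void) {
        unsigned char buf[32];
        junk_fill(buf, sizeof(buf));
        assert(junk_check(buf, sizeof(buf)));
        return 0;
    }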
/f-stack/app/redis-5.0.5/deps/jemalloc/test/integration/
allocated.c
     17  size_t sz, usize;  in thd_start() local
     73  usize = malloc_usable_size(p);  in thd_start()
     74  assert_u64_le(a0 + usize, a1,  in thd_start()
     90  assert_u64_le(d0 + usize, d1,  in thd_start()
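allocated.c reads per-thread counters before and after a malloc/free pair and asserts each grew by at least malloc_usable_size(p). Those counters are reachable through jemalloc's public mallctl interface ("thread.allocated" and "thread.deallocated"); a sketch assuming the program is built and linked against jemalloc:

    #include <jemalloc/jemalloc.h>
    #include <assert.h>
    #include <stdint.h>
    #include <stdlib.h>

    /* Read one of jemalloc's per-thread byte counters by name. */
    static uint64_t
    thread_counter(const char *name) {
        uint64_t v;
        size_t sz = sizeof(v);
        int err = mallctl(name, &v, &sz, NULL, 0);
        assert(err == 0);
        return v;
    }

    int
    main(void) {
        uint64_t a0 = thread_counter("thread.allocated");
        void *p = malloc(100);
        assert(p != NULL);
        size_t usize = malloc_usable_size(p);
        uint64_t a1 = thread_counter("thread.allocated");
        /* Same shape as assert_u64_le(a0 + usize, a1, ...) above. */
        assert(a0 + usize <= a1);
        uint64_t d0 = thread_counter("thread.deallocated");
        free(p);
        uint64_t d1 = thread_counter("thread.deallocated");
        assert(d0 + usize <= d1);
        return 0;
    }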
/f-stack/freebsd/contrib/openzfs/tests/zfs-tests/tests/functional/mount/
umount_unlinked_drain.ksh
     52  usize=$(($nunlinks - $nunlinked))
     53  if [[ $iters == $MAX_ITERS && $usize == $1 ]]; then
     56  if [[ $usize == $last_usize ]]; then
     61  last_usize=$usize
/f-stack/freebsd/crypto/ccp/
ccp_hardware.c
   1404  enum ccp_xts_unitsize usize;  in ccp_do_xts() local
   1413  usize = ccp_xts_unitsize_map[i].cxu_id;  in ccp_do_xts()
   1439  desc->aes_xts.size = usize;  in ccp_do_xts()
/f-stack/freebsd/contrib/dev/ath/ath_hal/ar9300/
ar9300_eeprom.c
   4019  usize = -1;  in ar9300_eeprom_restore_internal_address()
   4020  if (usize != mdata_size) {  in ar9300_eeprom_restore_internal_address()
   4023  __func__, usize, mdata_size);  in ar9300_eeprom_restore_internal_address()