/f-stack/app/nginx-1.16.1/src/core/

ngx_slab.c
    120  slots[i].slab = 0;  in ngx_slab_init()
    142  pool->free.slab = 0;  in ngx_slab_init()
    146  page->slab = pages;  in ngx_slab_init()
    156  page->slab = pages;  in ngx_slab_init()
    477  slab = page->slab;  in ngx_slab_free_locked()
    552  if (slab & m) {  in ngx_slab_free_locked()
    592  if (slab & m) {  in ngx_slab_free_locked()
    689  page[pages].slab = page->slab - pages;  in ngx_slab_alloc_pages()
    741  page->slab = pages--;  in ngx_slab_free_pages()
    761  page->slab += join->slab;  in ngx_slab_free_pages()
    [all …]

ngx_slab.h
    19   uintptr_t slab;  member

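The ngx_slab.c matches above are the internals of nginx's shared-memory slab
allocator: pool and slot setup in ngx_slab_init(), page handling in
ngx_slab_alloc_pages()/ngx_slab_free_pages(). For context, a minimal sketch of
how module code usually drives that pool through its public entry points; the
shared-memory zone is assumed to have been created elsewhere and is not part of
the matches listed here.

    #include <ngx_config.h>
    #include <ngx_core.h>

    /* Sketch: allocate from an nginx shared-memory slab pool.  The zone is
     * assumed to exist already; only the allocation path is shown. */
    static void *
    example_shm_alloc(ngx_shm_zone_t *shm_zone, size_t size)
    {
        ngx_slab_pool_t  *shpool;
        void             *p;

        shpool = (ngx_slab_pool_t *) shm_zone->shm.addr;

        ngx_shmtx_lock(&shpool->mutex);
        p = ngx_slab_alloc_locked(shpool, size);
        ngx_shmtx_unlock(&shpool->mutex);

        return p;
    }

Small requests are served from the per-size slots initialized in
ngx_slab_init(); larger ones fall through to ngx_slab_alloc_pages(), one of the
functions matched above.
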
/f-stack/app/redis-5.0.5/deps/jemalloc/include/jemalloc/internal/

arena_inlines_b.h
    168  bool slab;  in arena_dalloc_no_tcache() local
    180  if (likely(slab)) {  in arena_dalloc_no_tcache()
    201  bool slab;  in arena_dalloc() local
    205  slab = alloc_ctx->slab;  in arena_dalloc()
    222  if (likely(slab)) {  in arena_dalloc()
    248  bool slab;  in arena_sdalloc_no_tcache() local
    277  if (likely(slab)) {  in arena_sdalloc_no_tcache()
    299  bool slab;  in arena_sdalloc() local
    309  &local_ctx.slab);  in arena_sdalloc()
    313  slab = alloc_ctx->slab;  in arena_sdalloc()
    [all …]

rtree.h
    281  rtree_leaf_elm_t *elm, bool slab) {  in rtree_leaf_elm_slab_write() argument
    290  atomic_store_b(&elm->le_slab, slab, ATOMIC_RELEASE);  in rtree_leaf_elm_slab_write()
    296  extent_t *extent, szind_t szind, bool slab) {  in rtree_leaf_elm_write() argument
    300  ((uintptr_t)slab);  in rtree_leaf_elm_write()
    303  rtree_leaf_elm_slab_write(tsdn, rtree, elm, slab);  in rtree_leaf_elm_write()
    315  rtree_leaf_elm_t *elm, szind_t szind, bool slab) {  in rtree_leaf_elm_szind_slab_update() argument
    316  assert(!slab || szind < NBINS);  in rtree_leaf_elm_szind_slab_update()
    322  rtree_leaf_elm_slab_write(tsdn, rtree, elm, slab);  in rtree_leaf_elm_szind_slab_update()
    387  extent_t *extent, szind_t szind, bool slab) {  in rtree_write() argument
    476  uintptr_t key, szind_t szind, bool slab) {  in rtree_szind_slab_update() argument
    [all …]

jemalloc_internal_inlines_c.h
    228  extent_t *slab = iealloc(tsdn, ptr);  in iget_defrag_hint() local
    229  arena_t *arena = extent_arena_get(slab);  in iget_defrag_hint()
    230  szind_t binind = extent_szind_get(slab);  in iget_defrag_hint()
    234  if (slab != bin->slabcur) {  in iget_defrag_hint()
    238  *run_util = ((long long)(bin_info->nregs - extent_nfree_get(slab))<<16) / bin_info->nregs;  in iget_defrag_hint()

extent_inlines.h
    292  extent_slab_set(extent_t *extent, bool slab) {  in extent_slab_set() argument
    294  ((uint64_t)slab << EXTENT_BITS_SLAB_SHIFT);  in extent_slab_set()
    304  bool slab, szind_t szind, size_t sn, extent_state_t state, bool zeroed,  in extent_init() argument
    306  assert(addr == PAGE_ADDR2BASE(addr) || !slab);  in extent_init()
    311  extent_slab_set(extent, slab);  in extent_init()

extent_externs.h
    36   size_t size, size_t pad, size_t alignment, bool slab, szind_t szind,
    47   size_t alignment, bool slab, szind_t szind, bool *zero, bool *commit);

arena_structs_b.h
    226  bool slab;  member

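Several of the header matches above (rtree_leaf_elm_write(), extent_slab_set())
keep the slab flag in spare bits next to a pointer or inside a packed bit-field
word. Below is a minimal, self-contained sketch of that packing technique; the
names, shifts, and masks are hypothetical and do not reproduce jemalloc's actual
EXTENT_BITS_* layout.

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical layout: bit 0 = slab flag, bits 1..8 = size-class index,
     * remaining high bits = extent pointer (assumed at least 512-byte
     * aligned, so its low 9 bits are free). */
    #define EX_SLAB_BIT    ((uintptr_t)1)
    #define EX_SZIND_SHIFT 1
    #define EX_SZIND_MASK  ((uintptr_t)0xff << EX_SZIND_SHIFT)
    #define EX_PTR_MASK    (~(EX_SLAB_BIT | EX_SZIND_MASK))

    static inline uintptr_t
    ex_pack(void *extent, unsigned szind, bool slab)
    {
        assert(((uintptr_t)extent & ~EX_PTR_MASK) == 0); /* alignment holds */
        assert(szind < 0x100);                           /* fits in 8 bits  */
        return (uintptr_t)extent
            | ((uintptr_t)szind << EX_SZIND_SHIFT)
            | (slab ? EX_SLAB_BIT : 0);
    }

    static inline void *
    ex_extent(uintptr_t word)
    {
        return (void *)(word & EX_PTR_MASK);
    }

    static inline bool
    ex_slab(uintptr_t word)
    {
        return (word & EX_SLAB_BIT) != 0;
    }

One attraction of packing like this is that one lookup hands back the pointer
and both metadata fields together, which is the sort of read the deallocation
paths in arena_inlines_b.h above perform when they fill alloc_ctx.szind and
alloc_ctx.slab in a single call.
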
/f-stack/app/redis-5.0.5/deps/jemalloc/src/

arena.c
    932   return slab;  in arena_bin_slabs_nonfull_tryget()
    1095  extent_t *slab;  in arena_slab_alloc_hard() local
    1111  return slab;  in arena_slab_alloc_hard()
    1148  return slab;  in arena_slab_alloc()
    1154  extent_t *slab;  in arena_bin_nonfull_slab_get() local
    1160  return slab;  in arena_bin_nonfull_slab_get()
    1177  return slab;  in arena_bin_nonfull_slab_get()
    1187  return slab;  in arena_bin_nonfull_slab_get()
    1198  extent_t *slab;  in arena_bin_malloc_hard() local
    1266  if ((slab = bin->slabcur) != NULL && extent_nfree_get(slab) >  in arena_tcache_fill_small()
    [all …]

extent.c
    683   slab);  in extent_rtree_write_acquired()
    757   if (slab) {  in extent_register_impl()
    962   slab, growing_retained);  in extent_split_interior()
    1067  assert(new_addr == NULL || !slab);  in extent_recycle()
    1068  assert(pad == 0 || !slab);  in extent_recycle()
    1069  assert(!*zero || !slab);  in extent_recycle()
    1109  if (slab) {  in extent_recycle()
    1110  extent_slab_set(extent, slab);  in extent_recycle()
    1230  assert(pad == 0 || !slab);  in extent_grow_retained()
    1231  assert(!*zero || !slab);  in extent_grow_retained()
    [all …]

jemalloc.c
    1878  alloc_ctx.slab = (usize <= SMALL_MAXCLASS);  in imalloc_body()
    1888  alloc_ctx.slab = false;  in imalloc_body()
    2207  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);  in ifree()
    2251  alloc_ctx.slab = true;  in isfree()
    2258  &dbg_ctx.slab);  in isfree()
    2260  assert(dbg_ctx.slab == alloc_ctx.slab);  in isfree()
    2265  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);  in isfree()
    2325  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);  in je_realloc()
    2704  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);  in je_rallocx()
    2848  (uintptr_t)ptr, true, &alloc_ctx.szind, &alloc_ctx.slab);  in je_xallocx()

/f-stack/app/redis-5.0.5/deps/jemalloc/test/unit/

slab.c
    8   extent_t slab;  in TEST_BEGIN() local
    10  extent_init(&slab, NULL, mallocx(bin_info->slab_size,  in TEST_BEGIN()
    13  assert_ptr_not_null(extent_addr_get(&slab),  in TEST_BEGIN()
    16  void *reg = (void *)((uintptr_t)extent_addr_get(&slab) +  in TEST_BEGIN()
    18  assert_zu_eq(arena_slab_regind(&slab, binind, reg),  in TEST_BEGIN()
    23  free(extent_addr_get(&slab));  in TEST_BEGIN()

/f-stack/freebsd/vm/

uma_int.h
    408  slab_tohashslab(uma_slab_t slab)  in slab_tohashslab() argument
    415  slab_data(uma_slab_t slab, uma_keg_t keg)  in slab_data() argument
    421  return (slab_tohashslab(slab)->uhs_data);  in slab_data()
    425  slab_item(uma_slab_t slab, uma_keg_t keg, int index)  in slab_item() argument
    429  data = (uintptr_t)slab_data(slab, keg);  in slab_item()
    438  data = (uintptr_t)slab_data(slab, keg);  in slab_item_index()
    606  uma_hash_slab_t slab;  in hash_sfind() local
    613  return (&slab->uhs_slab);  in hash_sfind()
    625  return (p->plinks.uma.slab);  in vtoslab()
    634  *slab = p->plinks.uma.slab;  in vtozoneslab()
    [all …]

uma_core.c
    1541  uma_slab_t slab;  in keg_alloc_slab() local
    1551  slab = NULL;  in keg_alloc_slab()
    1640  return (slab);  in keg_alloc_slab()
    3645  uma_slab_t slab;  in keg_first_slab() local
    3652  slab = NULL;  in keg_first_slab()
    3657  return (slab);  in keg_first_slab()
    3662  return (slab);  in keg_first_slab()
    3692  return (slab);  in keg_fetch_free_slab()
    3753  return (slab);  in keg_fetch_slab()
    3800  slab = NULL;  in zone_import()
    [all …]

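The slab_item() and slab_item_index() matches above convert between an item's
index within a UMA slab and its address, using the keg's fixed, rounded item
size. A self-contained sketch of that arithmetic with hypothetical stand-in
structures (the real uma_slab_t and uma_keg_t layouts carry much more state):

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical stand-ins for the UMA structures referenced above. */
    struct keg_sketch  { size_t    item_size; };  /* rounded per-item size  */
    struct slab_sketch { uintptr_t data;      };  /* start of the item area */

    static inline void *
    slab_item_sketch(const struct slab_sketch *s, const struct keg_sketch *k,
        int index)
    {
        return (void *)(s->data + k->item_size * (size_t)index);
    }

    static inline int
    slab_item_index_sketch(const struct slab_sketch *s,
        const struct keg_sketch *k, const void *item)
    {
        return (int)(((uintptr_t)item - s->data) / k->item_size);
    }
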
/f-stack/tools/compat/include/vm/

uma_int.h
    407  slab_tohashslab(uma_slab_t slab)  in slab_tohashslab() argument
    414  slab_data(uma_slab_t slab, uma_keg_t keg)  in slab_data() argument
    420  return (slab_tohashslab(slab)->uhs_data);  in slab_data()
    424  slab_item(uma_slab_t slab, uma_keg_t keg, int index)  in slab_item() argument
    428  data = (uintptr_t)slab_data(slab, keg);  in slab_item()
    437  data = (uintptr_t)slab_data(slab, keg);  in slab_item_index()
    605  uma_hash_slab_t slab;  in hash_sfind() local
    612  return (&slab->uhs_slab);  in hash_sfind()
    623  return (p->plinks.uma.slab);  in vtoslab()
    632  *slab = p->plinks.uma.slab;  in vtozoneslab()
    [all …]

/f-stack/lib/include/vm/

uma_int.h
    85   vtozoneslab(vm_offset_t va, uma_zone_t *zone, uma_slab_t *slab)  in vtozoneslab() argument
    95   *slab = up->up_slab;  in vtozoneslab()
    100  vsetzoneslab(vm_offset_t va, uma_zone_t zone, uma_slab_t slab)  in vsetzoneslab() argument
    110  up->up_slab = slab;  in vsetzoneslab()
    117  up->up_slab = slab;  in vsetzoneslab()

/f-stack/dpdk/app/test/

test_bitmap.c
    190  uint64_t slab;  in test_bitmap_all_set() local
    211  pos = slab = 0;  in test_bitmap_all_set()
    212  if (!rte_bitmap_scan(bmp, &pos, &slab)) {  in test_bitmap_all_set()
    216  pos += (slab ? __builtin_ctzll(slab) : 0);  in test_bitmap_all_set()
    220  if (rte_bitmap_scan(bmp, &pos, &slab)) {  in test_bitmap_all_set()

/f-stack/freebsd/kern/

kern_malloc.c
    572   va = (uintptr_t)slab;  in malloc_large_slab()
    581   va = (uintptr_t)slab;  in malloc_large_size()
    870   uma_slab_t slab;  in free() local
    882   if (slab == NULL)  in free()
    910   uma_slab_t slab;  in zfree() local
    922   if (slab == NULL)  in zfree()
    948   uma_slab_t slab;  in realloc() local
    972   slab = NULL;  in realloc()
    979   KASSERT(slab != NULL,  in realloc()
    1042  uma_slab_t slab;  in malloc_usable_size() local
    [all …]

/f-stack/dpdk/lib/librte_eal/include/

rte_bitmap.h
    404  rte_bitmap_set_slab(struct rte_bitmap *bmp, uint32_t pos, uint64_t slab)  in rte_bitmap_set_slab() argument
    416  *slab2 |= slab;  in rte_bitmap_set_slab()
    512  __rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)  in __rte_bitmap_scan_read() argument
    520  *slab = *slab2;  in __rte_bitmap_scan_read()
    553  rte_bitmap_scan(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)  in rte_bitmap_scan() argument
    556  if (__rte_bitmap_scan_read(bmp, pos, slab)) {  in rte_bitmap_scan()
    563  __rte_bitmap_scan_read(bmp, pos, slab);  in rte_bitmap_scan()

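rte_bitmap_scan() reports the position of a 64-bit slab that contains at least
one set bit, and the callers listed further down (octeontx2, bcmfs, bonding)
all add a count-trailing-zeros of the returned slab to that position to get the
index of an individual bit. A minimal sketch of that pattern, assuming bmp is
an already-initialized bitmap in which set bits mark free entries:

    #include <rte_bitmap.h>

    /* Sketch: find and claim a free index tracked by an rte_bitmap.
     * Returns 0 and stores the index in *out, or -1 if no bit is set. */
    static int
    claim_free_index(struct rte_bitmap *bmp, uint32_t *out)
    {
        uint32_t pos = 0;
        uint64_t slab = 0;

        if (!rte_bitmap_scan(bmp, &pos, &slab))
            return -1;                        /* bitmap is empty */

        *out = pos + __builtin_ctzll(slab);   /* first set bit inside the slab */
        rte_bitmap_clear(bmp, *out);          /* mark the entry as taken */
        return 0;
    }

rte_bitmap_scan() only returns success for a slab with at least one set bit, so
the value handed to __builtin_ctzll() here is never zero.
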
/f-stack/dpdk/drivers/net/octeontx2/

otx2_flow_utils.c
    460  slab >>= 32;  in flow_first_set_bit()
    462  if ((slab & 0xffff) == 0) {  in flow_first_set_bit()
    464  slab >>= 16;  in flow_first_set_bit()
    466  if ((slab & 0xff) == 0) {  in flow_first_set_bit()
    468  slab >>= 8;  in flow_first_set_bit()
    470  if ((slab & 0xf) == 0) {  in flow_first_set_bit()
    472  slab >>= 4;  in flow_first_set_bit()
    474  if ((slab & 0x3) == 0) {  in flow_first_set_bit()
    476  slab >>= 2;  in flow_first_set_bit()
    478  if ((slab & 0x1) == 0)  in flow_first_set_bit()
    [all …]

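flow_first_set_bit() above locates the lowest set bit of a 64-bit slab by
halving the candidate width at each step (32, 16, 8, 4, 2, 1) rather than
calling a ctz builtin directly. A self-contained sketch of the same narrowing
technique, under a hypothetical function name:

    #include <stdint.h>

    /* Returns the index (0..63) of the lowest set bit of a non-zero slab. */
    static int
    first_set_bit64(uint64_t slab)
    {
        int bit = 0;

        if ((slab & 0xffffffffULL) == 0) { bit += 32; slab >>= 32; }
        if ((slab & 0xffffULL) == 0)     { bit += 16; slab >>= 16; }
        if ((slab & 0xffULL) == 0)       { bit += 8;  slab >>= 8;  }
        if ((slab & 0xfULL) == 0)        { bit += 4;  slab >>= 4;  }
        if ((slab & 0x3ULL) == 0)        { bit += 2;  slab >>= 2;  }
        if ((slab & 0x1ULL) == 0)        { bit += 1; }

        return bit;
    }
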
/f-stack/dpdk/drivers/mempool/octeontx2/

otx2_mempool_ops.c
    549  bitmap_ctzll(uint64_t slab)  in bitmap_ctzll() argument
    551  if (slab == 0)  in bitmap_ctzll()
    554  return __builtin_ctzll(slab);  in bitmap_ctzll()
    565  uint64_t slab;  in npa_lf_aura_pool_pair_alloc() local
    578  pos = slab = 0;  in npa_lf_aura_pool_pair_alloc()
    582  rc = rte_bitmap_scan(lf->npa_bmp, &pos, &slab);  in npa_lf_aura_pool_pair_alloc()
    590  aura_id = pos + bitmap_ctzll(slab);  in npa_lf_aura_pool_pair_alloc()

/f-stack/freebsd/contrib/openzfs/config/

kernel-kmem-cache.m4
    8  #include <linux/slab.h>

/f-stack/dpdk/drivers/crypto/bcmfs/hw/

bcmfs5_rm.c
    386  uint64_t slab = 0;  in bcmfs5_enqueue_single_request_qp() local
    401  ret = rte_bitmap_scan(qp->ctx_bmp, &pos, &slab);  in bcmfs5_enqueue_single_request_qp()
    407  reqid = pos + __builtin_ctzll(slab);  in bcmfs5_enqueue_single_request_qp()

bcmfs4_rm.c
    456  uint64_t slab = 0;  in bcmfs4_enqueue_single_request_qp() local
    470  ret = rte_bitmap_scan(qp->ctx_bmp, &pos, &slab);  in bcmfs4_enqueue_single_request_qp()
    476  reqid = pos + __builtin_ctzll(slab);  in bcmfs4_enqueue_single_request_qp()

/f-stack/dpdk/drivers/net/bonding/

rte_eth_bond_api.c
    195  uint64_t slab = 0;  in slave_vlan_filter_set() local
    205  found = rte_bitmap_scan(internals->vlan_filter_bmp, &pos, &slab);  in slave_vlan_filter_set()
    218  if (unlikely(slab & mask)) {  in slave_vlan_filter_set()
    226  &pos, &slab);  in slave_vlan_filter_set()