Lines Matching refs:fBlock

267 static void markBlocks(FreeBlock *fBlock, int num, size_t size) { in markBlocks() argument
269 fBlock = (FreeBlock*)((uintptr_t)fBlock + size); in markBlocks()
270 fBlock->initHeader(); in markBlocks()
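
Lines 269-270 above are the loop body of FreeBlock::markBlocks: step the pointer forward by one block size and re-initialize the header found there. A minimal sketch of that pointer walk, assuming a toy Header type in place of the real FreeBlock layout:

    #include <cstddef>
    #include <cstdint>

    // Toy stand-in for FreeBlock's header; the field layout is an assumption.
    struct Header {
        std::size_t mySize = 0;
        std::size_t leftSize = 0;
        void initHeader() { mySize = leftSize = 0; }
    };

    // Walk `num` consecutive blocks of `size` bytes and reset the header of
    // every block after the first, which the caller is assumed to have set up.
    static void markBlocks(Header *block, int num, std::size_t size) {
        for (int i = 1; i < num; ++i) {
            block = (Header*)((std::uintptr_t)block + size);
            block->initHeader();
        }
    }

    int main() {
        Header blocks[4];
        markBlocks(blocks, 4, sizeof(Header));  // stride of one Header per step
        return 0;
    }
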
340 void CoalRequestQ::putBlock(FreeBlock *fBlock) in putBlock() argument
342 MALLOC_ASSERT(fBlock->sizeTmp >= FreeBlock::minBlockSize, ASSERT_TEXT); in putBlock()
343 fBlock->markUsed(); in putBlock()
349 fBlock->nextToFree = myBlToFree; in putBlock()
350 if (blocksToFree.compare_exchange_strong(myBlToFree, fBlock)) { in putBlock()
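
Lines 349-350 are the publishing step of a lock-free LIFO push: point the block's nextToFree at the observed head of blocksToFree, then swing the head to the block with compare_exchange_strong, retrying if another thread got there first. A sketch of that pattern (CoalQueue and Node are illustrative names, not the oneTBB classes):

    #include <atomic>
    #include <cassert>

    // Minimal node; nextToFree plays the role it has in FreeBlock.
    struct Node {
        Node *nextToFree = nullptr;
    };

    struct CoalQueue {
        std::atomic<Node*> blocksToFree{nullptr};

        // Push in the style of CoalRequestQ::putBlock: link the new node in
        // front of the current head and publish it with a CAS.
        void putBlock(Node *n) {
            Node *head = blocksToFree.load(std::memory_order_acquire);
            do {
                n->nextToFree = head;  // new node points at the current list
            } while (!blocksToFree.compare_exchange_strong(head, n));
            // on CAS failure `head` is refreshed with the observed value and
            // the link is redone before retrying
        }
    };

    int main() {
        CoalQueue q;
        Node a, b;
        q.putBlock(&a);
        q.putBlock(&b);
        assert(q.blocksToFree.load() == &b && b.nextToFree == &a);
        return 0;
    }
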
391 FreeBlock *fBlock = nullptr; in getFromBin()
414 fBlock = curr; in getFromBin()
428 fBlock = curr; in getFromBin()
431 if (fBlock) { in getFromBin()
435 b->removeBlock(fBlock); in getFromBin()
438 fBlock->sizeTmp = szBlock; in getFromBin()
446 return fBlock; in getFromBin()
477 void Backend::Bin::removeBlock(FreeBlock *fBlock) in removeBlock() argument
479 MALLOC_ASSERT(fBlock->next||fBlock->prev||fBlock== head.load(std::memory_order_relaxed), in removeBlock()
481 if (head.load(std::memory_order_relaxed) == fBlock) in removeBlock()
482 head.store(fBlock->next, std::memory_order_relaxed); in removeBlock()
483 if (tail == fBlock) in removeBlock()
484 tail = fBlock->prev; in removeBlock()
485 if (fBlock->prev) in removeBlock()
486 fBlock->prev->next = fBlock->next; in removeBlock()
487 if (fBlock->next) in removeBlock()
488 fBlock->next->prev = fBlock->prev; in removeBlock()
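
getFromBin (391-446) and Bin::removeBlock (477-488) together form the take-from-bin path: scan the bin's doubly-linked list for a usable block, unlink it, and stamp the size being handed out into sizeTmp. A sketch of that shape, assuming a plain single-threaded list in place of the real bin with its atomic head and locking:

    #include <cstddef>

    // Illustrative free-block node; size/sizeTmp mimic the roles the real
    // FreeBlock fields play in the lines above.
    struct FB {
        FB *next = nullptr, *prev = nullptr;
        std::size_t size = 0;     // bytes this free block covers
        std::size_t sizeTmp = 0;  // size recorded when the block is taken
    };

    struct Bin {
        FB *head = nullptr, *tail = nullptr;

        // Doubly-linked unlink, same shape as Backend::Bin::removeBlock.
        void removeBlock(FB *fBlock) {
            if (head == fBlock) head = fBlock->next;
            if (tail == fBlock) tail = fBlock->prev;
            if (fBlock->prev) fBlock->prev->next = fBlock->next;
            if (fBlock->next) fBlock->next->prev = fBlock->prev;
        }

        // First-fit scan, then unlink and stamp sizeTmp, echoing the fBlock
        // handling in getFromBin (minus all locking and retry logic).
        FB *getFromBin(std::size_t wanted) {
            FB *fBlock = nullptr;
            for (FB *curr = head; curr; curr = curr->next)
                if (curr->size >= wanted) { fBlock = curr; break; }
            if (fBlock) {
                removeBlock(fBlock);
                fBlock->sizeTmp = wanted;
            }
            return fBlock;
        }
    };

    int main() {
        FB a, b; a.size = 64; b.size = 256;
        a.next = &b; b.prev = &a;
        Bin bin; bin.head = &a; bin.tail = &b;
        FB *got = bin.getFromBin(128);   // skips a, takes b
        return (got == &b && bin.tail == &a) ? 0 : 1;
    }
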
491 void Backend::IndexedBins::addBlock(int binIdx, FreeBlock *fBlock, size_t /* blockSz */, bool addTo… in addBlock() argument
494 fBlock->myBin = binIdx; in addBlock()
495 fBlock->next = fBlock->prev = nullptr; in addBlock()
499 fBlock->prev = b->tail; in addBlock()
500 b->tail = fBlock; in addBlock()
501 if (fBlock->prev) in addBlock()
502 fBlock->prev->next = fBlock; in addBlock()
504 b->head.store(fBlock, std::memory_order_relaxed); in addBlock()
506 fBlock->next = b->head.load(std::memory_order_relaxed); in addBlock()
507 b->head.store(fBlock, std::memory_order_relaxed); in addBlock()
508 if (fBlock->next) in addBlock()
509 fBlock->next->prev = fBlock; in addBlock()
511 b->tail = fBlock; in addBlock()
517 bool Backend::IndexedBins::tryAddBlock(int binIdx, FreeBlock *fBlock, bool addToTail) in tryAddBlock() argument
521 fBlock->myBin = binIdx; in tryAddBlock()
523 fBlock->next = nullptr; in tryAddBlock()
528 fBlock->prev = b->tail; in tryAddBlock()
529 b->tail = fBlock; in tryAddBlock()
530 if (fBlock->prev) in tryAddBlock()
531 fBlock->prev->next = fBlock; in tryAddBlock()
533 b->head.store(fBlock, std::memory_order_relaxed); in tryAddBlock()
536 fBlock->prev = nullptr; in tryAddBlock()
541 fBlock->next = b->head.load(std::memory_order_relaxed); in tryAddBlock()
542 b->head.store(fBlock, std::memory_order_relaxed); in tryAddBlock()
543 if (fBlock->next) in tryAddBlock()
544 fBlock->next->prev = fBlock; in tryAddBlock()
546 b->tail = fBlock; in tryAddBlock()
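
addBlock (491-511) and tryAddBlock (517-546) insert a block at either end of a bin's doubly-linked list after tagging it with myBin; judging by its name and bool result, tryAddBlock does the same linking only when the bin can be claimed without waiting. A sketch of both, assuming std::mutex::try_lock stands in for the allocator's own bin lock:

    #include <mutex>

    struct FB {
        FB *next = nullptr, *prev = nullptr;
        int myBin = -1;   // index of the bin the block currently lives in
    };

    struct Bin {
        FB *head = nullptr, *tail = nullptr;
        std::mutex lock;

        // Same linking logic as IndexedBins::addBlock: push to tail or head.
        void addBlock(int binIdx, FB *fBlock, bool addToTail) {
            fBlock->myBin = binIdx;
            fBlock->next = fBlock->prev = nullptr;
            if (addToTail) {
                fBlock->prev = tail;
                tail = fBlock;
                if (fBlock->prev) fBlock->prev->next = fBlock;
                else              head = fBlock;      // list was empty
            } else {
                fBlock->next = head;
                head = fBlock;
                if (fBlock->next) fBlock->next->prev = fBlock;
                else              tail = fBlock;      // list was empty
            }
        }

        // tryAddBlock-style variant: give up rather than wait if another
        // thread currently owns the bin.
        bool tryAddBlock(int binIdx, FB *fBlock, bool addToTail) {
            if (!lock.try_lock()) return false;
            addBlock(binIdx, fBlock, addToTail);
            lock.unlock();
            return true;
        }
    };

    int main() {
        Bin bin; FB a, b;
        bin.addBlock(/*binIdx=*/3, &a, /*addToTail=*/false);
        bool ok = bin.tryAddBlock(3, &b, /*addToTail=*/true);
        return (ok && bin.head == &a && bin.tail == &b) ? 0 : 1;
    }
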
560 void Backend::IndexedBins::lockRemoveBlock(int binIdx, FreeBlock *fBlock) in lockRemoveBlock() argument
563 freeBins[binIdx].removeBlock(fBlock); in lockRemoveBlock()
573 FreeBlock *Backend::splitBlock(FreeBlock *fBlock, int num, size_t size, bool blockIsAligned, bool n… in splitBlock() argument
584 FreeBlock *newBlock = alignUp(fBlock, slabSize); in splitBlock()
586 uintptr_t fBlockEnd = (uintptr_t)fBlock + fBlock->sizeTmp; in splitBlock()
595 if (newBlock != fBlock) { in splitBlock()
597 size_t leftSize = (uintptr_t)newBlock - (uintptr_t)fBlock; in splitBlock()
598 coalescAndPut(fBlock, leftSize, toAlignedBin(fBlock, leftSize)); in splitBlock()
600 fBlock = newBlock; in splitBlock()
601 } else if (size_t splitSize = fBlock->sizeTmp - totalSize) { // need to split the block in splitBlock()
607 splitBlock = fBlock; in splitBlock()
608 fBlock = (FreeBlock*)((uintptr_t)splitBlock + splitSize); in splitBlock()
609 fBlock->initHeader(); in splitBlock()
612 splitBlock = (FreeBlock*)((uintptr_t)fBlock + totalSize); in splitBlock()
620 …MALLOC_ASSERT(!needAlignedBlock || isAligned(fBlock, slabSize), "Expect to get aligned block, if o… in splitBlock()
621 FreeBlock::markBlocks(fBlock, num, size); in splitBlock()
622 return fBlock; in splitBlock()
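
splitBlock (573-622) is mostly address arithmetic: when an aligned block is needed, round the start up to slabSize and give the skipped left part back via coalescAndPut (597-600); otherwise carve any surplus beyond totalSize off as a separate block (601-612); finally assert the alignment and stamp the result with FreeBlock::markBlocks (620-621). A sketch of just that arithmetic, with an assumed alignUp helper and made-up addresses:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Round p up to a multiple of `alignment` (a power of two), as alignUp does.
    static std::uintptr_t alignUp(std::uintptr_t p, std::uintptr_t alignment) {
        return (p + alignment - 1) & ~(alignment - 1);
    }

    int main() {
        const std::uintptr_t slabSize = 16 * 1024;

        // A free block as splitBlock sees it: start address plus sizeTmp bytes.
        std::uintptr_t fBlock    = 0x100400;        // deliberately misaligned
        std::size_t    sizeTmp   = 3 * slabSize;    // bytes covered by the block
        std::size_t    totalSize = slabSize;        // bytes actually requested

        std::uintptr_t fBlockEnd = fBlock + sizeTmp;

        // Left part: everything below the aligned start would be returned to
        // the bins (coalescAndPut in the real code).
        std::uintptr_t newBlock = alignUp(fBlock, slabSize);
        std::size_t    leftSize = newBlock - fBlock;

        // Right part: whatever remains past the requested totalSize would be
        // split off as its own free block (the real code first checks that
        // the block is large enough for this to be non-negative).
        std::size_t rightSize = fBlockEnd - (newBlock + totalSize);

        std::printf("return %#lx; give back %zu bytes on the left, %zu on the right\n",
                    (unsigned long)newBlock, leftSize, rightSize);
        return 0;
    }
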
905 void Backend::removeBlockFromBin(FreeBlock *fBlock) in removeBlockFromBin() argument
907 if (fBlock->myBin != Backend::NO_BIN) { in removeBlockFromBin()
908 if (fBlock->slabAligned) in removeBlockFromBin()
909 freeSlabAlignedBins.lockRemoveBlock(fBlock->myBin, fBlock); in removeBlockFromBin()
911 freeLargeBlockBins.lockRemoveBlock(fBlock->myBin, fBlock); in removeBlockFromBin()
915 void Backend::genericPutBlock(FreeBlock *fBlock, size_t blockSz, bool slabAligned) in genericPutBlock() argument
918 coalescAndPut(fBlock, blockSz, slabAligned); in genericPutBlock()
1004 FreeBlock *fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion), in remap() local
1008 startUseBlock(region, fBlock, /*addToBin=*/false); in remap()
1009 MALLOC_ASSERT(fBlock->sizeTmp == region->blockSz, ASSERT_TEXT); in remap()
1020 LargeMemoryBlock *lmb = (LargeMemoryBlock*)fBlock; in remap()
1042 FreeBlock *Backend::doCoalesc(FreeBlock *fBlock, MemRegion **mRegion) in doCoalesc() argument
1044 FreeBlock *resBlock = fBlock; in doCoalesc()
1045 size_t resSize = fBlock->sizeTmp; in doCoalesc()
1048 fBlock->markCoalescing(resSize); in doCoalesc()
1052 size_t leftSz = fBlock->trySetLeftUsed(GuardedSize::COAL_BLOCK); in doCoalesc()
1055 coalescQ.putBlock(fBlock); in doCoalesc()
1058 FreeBlock *left = fBlock->leftNeig(leftSz); in doCoalesc()
1061 fBlock->setLeftFree(leftSz); // rollback in doCoalesc()
1062 coalescQ.putBlock(fBlock); in doCoalesc()
1074 FreeBlock *right = fBlock->rightNeig(fBlock->sizeTmp); in doCoalesc()
1204 void Backend::coalescAndPut(FreeBlock *fBlock, size_t blockSz, bool slabAligned) in coalescAndPut() argument
1206 fBlock->sizeTmp = blockSz; in coalescAndPut()
1207 fBlock->nextToFree = nullptr; in coalescAndPut()
1208 fBlock->slabAligned = slabAligned; in coalescAndPut()
1210 coalescAndPutList(fBlock, /*forceCoalescQDrop=*/false, /*reportBlocksProcessed=*/false); in coalescAndPut()
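
coalescAndPut (1204-1210) just records sizeTmp, nextToFree and slabAligned on the freed block before passing it to coalescAndPutList, and doCoalesc (1042-1074) then tries to grow the block by absorbing a free left neighbour (reached through the size stored in the header, leftNeig) and a free right neighbour (at fBlock + sizeTmp, rightNeig); the visible branches show that when the left guard cannot be taken the block is parked in coalescQ instead, with setLeftFree as the rollback. A single-threaded sketch of the merge arithmetic, assuming boundary-tag style headers and none of the real locking, rollback, or queueing:

    #include <cstddef>
    #include <cstdint>
    #include <new>

    // Boundary-tag style header: a block knows its own size and its left
    // neighbour's size, so both neighbours are reachable by pure arithmetic.
    // This is an illustrative layout, not the real FreeBlock header.
    struct Block {
        std::size_t mySize;    // size of this block, header included
        std::size_t leftSize;  // size of the block immediately to the left (0 = none)
        bool        isFree;

        Block *leftNeig()  { return (Block*)((std::uintptr_t)this - leftSize); }
        Block *rightNeig() { return (Block*)((std::uintptr_t)this + mySize); }
    };

    // Merge a freed block with free neighbours, the growth doCoalesc performs
    // (minus locking, rollback and the coalescing queue).
    static Block *coalesce(Block *fBlock, Block *regionEnd) {
        Block *resBlock = fBlock;
        std::size_t resSize = fBlock->mySize;

        if (fBlock->leftSize) {                       // a left neighbour exists
            Block *left = fBlock->leftNeig();
            if (left->isFree) {                       // absorb it: result starts there
                resBlock = left;
                resSize += left->mySize;
            }
        }
        Block *right = fBlock->rightNeig();
        if (right != regionEnd && right->isFree)      // absorb the right neighbour
            resSize += right->mySize;

        resBlock->mySize = resSize;
        resBlock->isFree = true;
        Block *after = resBlock->rightNeig();         // keep the next boundary tag consistent
        if (after != regionEnd) after->leftSize = resSize;
        return resBlock;
    }

    int main() {
        // Three adjacent 64-byte blocks carved out of one buffer.
        alignas(Block) unsigned char buf[192];
        Block *a = new (buf)       Block{64, 0,  true };   // free
        Block *b = new (buf + 64)  Block{64, 64, false};   // being freed now
        new (buf + 128)            Block{64, 64, true };   // free
        Block *end = (Block*)(buf + sizeof(buf));

        Block *merged = coalesce(b, end);
        return (merged == a && merged->mySize == 192) ? 0 : 1;
    }
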
1229 FreeBlock *fBlock; in findBlockInRegion() local
1239 fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion), sizeof(uintptr_t)); in findBlockInRegion()
1242 fBlock = (FreeBlock *)alignUp((uintptr_t)region + sizeof(MemRegion), largeObjectAlignment); in findBlockInRegion()
1243 fBlockEnd = (uintptr_t)fBlock + exactBlockSize; in findBlockInRegion()
1246 if (fBlockEnd <= (uintptr_t)fBlock) in findBlockInRegion()
1248 blockSz = fBlockEnd - (uintptr_t)fBlock; in findBlockInRegion()
1256 return fBlock; in findBlockInRegion()
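
findBlockInRegion (1229-1256) computes where the usable FreeBlock starts inside a freshly created MemRegion: just past the MemRegion header, aligned to pointer size or to largeObjectAlignment depending on the region type (1239/1242), with the block size taken as the distance from there to the end of the usable part of the region and a bail-out if that distance is not positive (1246-1248). A sketch of the address arithmetic with made-up header sizes and alignments:

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    static std::uintptr_t alignUp(std::uintptr_t p, std::uintptr_t a) {
        return (p + a - 1) & ~(a - 1);
    }

    int main() {
        // Illustrative numbers; the real ones come from sizeof(MemRegion),
        // the trailing LastFreeBlock and largeObjectAlignment in the backend.
        const std::uintptr_t regionStart          = 0x200000;
        const std::size_t    regionSize           = 1 << 20;
        const std::size_t    memRegionHdrSize     = 64;   // stand-in for sizeof(MemRegion)
        const std::size_t    trailingGuardSize    = 64;   // room kept at the region end
        const std::uintptr_t largeObjectAlignment = 128;

        // The usable block starts just past the region header, rounded up to
        // the alignment this region type needs (line 1242; line 1239 uses only
        // pointer alignment).
        std::uintptr_t fBlock = alignUp(regionStart + memRegionHdrSize,
                                        largeObjectAlignment);

        // It ends where the space reserved at the end of the region begins.
        std::uintptr_t fBlockEnd = regionStart + regionSize - trailingGuardSize;

        if (fBlockEnd <= fBlock) {                 // region too small to be useful
            std::puts("region cannot hold a block");
            return 1;
        }
        std::size_t blockSz = fBlockEnd - fBlock;  // what startUseBlock will publish
        std::printf("block at %#lx, %zu usable bytes\n", (unsigned long)fBlock, blockSz);
        return 0;
    }
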
1261 void Backend::startUseBlock(MemRegion *region, FreeBlock *fBlock, bool addToBin) in startUseBlock() argument
1264 fBlock->initHeader(); in startUseBlock()
1265 fBlock->setMeFree(blockSz); in startUseBlock()
1267 LastFreeBlock *lastBl = static_cast<LastFreeBlock*>(fBlock->rightNeig(blockSz)); in startUseBlock()
1281 fBlock->slabAligned = true; in startUseBlock()
1282 freeSlabAlignedBins.addBlock(targetBin, fBlock, blockSz, /*addToTail=*/false); in startUseBlock()
1284 fBlock->slabAligned = false; in startUseBlock()
1285 freeLargeBlockBins.addBlock(targetBin, fBlock, blockSz, /*addToTail=*/false); in startUseBlock()
1291 fBlock->slabAligned = region->type == MEMREG_SLAB_BLOCKS ? true : false; in startUseBlock()
1292 fBlock->sizeTmp = fBlock->tryLockBlock(); in startUseBlock()
1293 MALLOC_ASSERT(fBlock->sizeTmp >= FreeBlock::minBlockSize, "Locking must be successful"); in startUseBlock()
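
startUseBlock (1261-1293) publishes the block that findBlockInRegion located: it writes a fresh header, marks the whole extent free (1264-1265), prepares the LastFreeBlock that sits right past the block (1267), and then either bins the block, choosing the slab-aligned or the large-block bins by slabAligned (1281-1285), or locks it for the caller's immediate use (1292-1293). A sketch of the header setup with toy types in place of FreeBlock/LastFreeBlock:

    #include <cstddef>
    #include <cstdint>
    #include <new>

    // Toy stand-in for FreeBlock; the real header layout is different.
    struct Header {
        std::size_t mySize = 0;
        std::size_t leftSize = 0;
        bool        isFree = false;
        bool        slabAligned = false;
        void initHeader()              { mySize = leftSize = 0; isFree = false; }
        void setMeFree(std::size_t sz) { mySize = sz; isFree = true; }
        Header *rightNeig(std::size_t sz) { return (Header*)((std::uintptr_t)this + sz); }
    };

    // Publish a block of blockSz bytes at `mem`, in the spirit of startUseBlock:
    // fresh header, whole extent marked free, and a trailing header right after
    // the block (the real LastFreeBlock additionally remembers its MemRegion).
    static Header *startUseBlock(void *mem, std::size_t blockSz, bool slabAligned) {
        Header *fBlock = new (mem) Header;
        fBlock->initHeader();
        fBlock->setMeFree(blockSz);
        fBlock->slabAligned = slabAligned;

        Header *lastBl = new (fBlock->rightNeig(blockSz)) Header;
        lastBl->initHeader();
        lastBl->leftSize = blockSz;   // lets a right-to-left walk find fBlock
        return fBlock;
    }

    int main() {
        alignas(Header) unsigned char region[256];
        std::size_t blockSz = sizeof(region) - sizeof(Header);  // keep room for the tail header
        Header *fBlock = startUseBlock(region, blockSz, /*slabAligned=*/true);
        return (fBlock->isFree && fBlock->mySize == blockSz) ? 0 : 1;
    }
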
1356 FreeBlock *fBlock = findBlockInRegion(region, size); in addNewRegion() local
1357 if (!fBlock) { in addNewRegion()
1363 startUseBlock(region, fBlock, addToBin); in addNewRegion()
1365 return addToBin? (FreeBlock*)VALID_BLOCK_IN_BIN : fBlock; in addNewRegion()
1387 FreeBlock *fBlock = findBlockInRegion(curr, curr->blockSz); in reset() local
1388 MALLOC_ASSERT(fBlock, "A memory region unexpectedly got smaller"); in reset()
1389 startUseBlock(curr, fBlock, /*addToBin=*/true); in reset()