Lines Matching refs:blockToUse (numbers are source-file line numbers; only lines containing blockToUse are shown, so gaps in the numbering are non-matching lines)
In BackRefBlock():
 41    BackRefBlock(const BackRefBlock *blockToUse, intptr_t num) :
 42        nextForUse(nullptr), bumpPtr((FreeObject*)((uintptr_t)blockToUse + slabSize - sizeof(void*))),
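The initializer on line 42 parks bumpPtr on the last pointer-sized slot of the slab, so fresh slots are carved downward from the top of the block. A quick standalone check of that arithmetic, assuming a hypothetical 16 KB slabSize purely for illustration (the real value is defined elsewhere in tbbmalloc):

#include <cassert>
#include <cstdint>

int main() {
    constexpr std::uintptr_t slabSize = 16 * 1024;  // assumed value, for illustration only
    std::uintptr_t block = 0x100000;                // pretend block address
    // same expression as the initializer on line 42
    std::uintptr_t bumpPtr = block + slabSize - sizeof(void*);
    // the slot [bumpPtr, bumpPtr + sizeof(void*)) is exactly the
    // final sizeof(void*) bytes of the slab
    assert(bumpPtr + sizeof(void*) == block + slabSize);
    return 0;
}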
In newBackRef():
263    BackRefBlock *blockToUse;
270    blockToUse = backRefMain.load(std::memory_order_relaxed)->findFreeBlock();
271    if (!blockToUse)
275        MallocMutex::scoped_lock lock(blockToUse->blockMutex);
277        if (blockToUse->freeList) {
278            toUse = (void**)blockToUse->freeList;
279            blockToUse->freeList = blockToUse->freeList->next;
280            MALLOC_ASSERT(!blockToUse->freeList ||
281                ((uintptr_t)blockToUse->freeList>=(uintptr_t)blockToUse
282                 && (uintptr_t)blockToUse->freeList <
283                 (uintptr_t)blockToUse + slabSize), ASSERT_TEXT);
284        } else if (blockToUse->allocatedCount.load(std::memory_order_relaxed) < BR_MAX_CNT) {
285            toUse = (void**)blockToUse->bumpPtr;
286            blockToUse->bumpPtr =
287                (FreeObject*)((uintptr_t)blockToUse->bumpPtr - sizeof(void*));
288            if (blockToUse->allocatedCount.load(std::memory_order_relaxed) == BR_MAX_CNT-1) {
289                MALLOC_ASSERT((uintptr_t)blockToUse->bumpPtr
290                    < (uintptr_t)blockToUse+sizeof(BackRefBlock),
292                blockToUse->bumpPtr = nullptr;
296        if (!blockToUse->allocatedCount.load(std::memory_order_relaxed) &&
300            blockToUse->allocatedCount.store(blockToUse->allocatedCount.load(std::memory_order_relaxed) + 1, std::memory_order_relaxed);
309    res.main = blockToUse->myNum;
311        ((uintptr_t)toUse - ((uintptr_t)blockToUse + sizeof(BackRefBlock)))/sizeof(void*);
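Taken together, the matched lines sketch a two-tier allocation inside newBackRef(): pop a recycled slot from freeList if one exists (lines 277-283), otherwise carve a fresh slot by moving bumpPtr down until BR_MAX_CNT entries are out (lines 284-292), then bump allocatedCount (line 300) and encode the result as the slot's offset past the block header (line 311). Below is a self-contained sketch of that scheme; Slab, SLAB_SIZE, maxCount(), and freeSlot() are hypothetical stand-ins for BackRefBlock, slabSize, BR_MAX_CNT, and the real free path, and the blockMutex locking and atomic allocatedCount of the original are deliberately omitted:

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstdlib>
#include <new>

constexpr std::size_t SLAB_SIZE = 16 * 1024;    // assumed slab size

struct FreeSlot { FreeSlot *next; };

struct Slab {
    FreeSlot      *freeList = nullptr;  // recycled slots, LIFO
    std::uintptr_t bumpPtr;             // moves from the slab top downward
    int            allocatedCount = 0;

    // bumpPtr starts at the last pointer-sized slot, mirroring
    // (uintptr_t)blockToUse + slabSize - sizeof(void*) on line 42
    Slab() : bumpPtr((std::uintptr_t)this + SLAB_SIZE - sizeof(void*)) {}

    // rough analogue of BR_MAX_CNT: pointer-sized slots past the header
    static int maxCount() { return int((SLAB_SIZE - sizeof(Slab)) / sizeof(void*)); }

    void **allocateSlot() {
        void **toUse = nullptr;
        if (freeList) {                           // 1) reuse a freed slot
            toUse = (void**)freeList;
            freeList = freeList->next;
        } else if (allocatedCount < maxCount()) { // 2) carve a fresh slot
            toUse = (void**)bumpPtr;
            bumpPtr -= sizeof(void*);
        }
        if (toUse)
            ++allocatedCount;                     // line 300's increment, non-atomic here
        return toUse;    // nullptr: block exhausted, caller picks another block
    }

    void freeSlot(void **slot) {                  // recycle a slot onto freeList
        FreeSlot *f = (FreeSlot*)slot;
        f->next = freeList;
        freeList = f;
        --allocatedCount;
    }
};

int main() {
    void *mem = std::calloc(1, SLAB_SIZE);        // back the whole slab
    Slab *s = new (mem) Slab;

    void **a = s->allocateSlot();
    void **b = s->allocateSlot();
    assert(a && b && a != b);

    // offset computation as on line 311: slots are indexed past the header,
    // so the very first bump allocation gets the highest index
    std::uintptr_t offset =
        ((std::uintptr_t)a - ((std::uintptr_t)s + sizeof(Slab))) / sizeof(void*);
    assert(offset == std::uintptr_t(Slab::maxCount() - 1));

    s->freeSlot(a);
    assert(s->allocateSlot() == a);               // freed slot is reused first

    std::free(mem);
    return 0;
}

In the real code the whole branch runs under blockToUse->blockMutex (line 275) and allocatedCount is an atomic, which is why the listing shows explicit load/store calls with std::memory_order_relaxed rather than a plain increment.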