Lines matching refs:std (cross-reference search results: each hit shows its line number in the source file, the matching code, and, where present, the enclosing function)
34 std::atomic<int> allocatedCount; // the number of objects allocated
39 std::atomic<bool> addedToForUse;
76 std::atomic<BackRefBlock*> active; // if defined, use it for allocations
77 std::atomic<BackRefBlock*> listForUse; // the chain of data blocks with free items
79 std::atomic<intptr_t> lastUsed; // index of the last used block
94 static std::atomic<BackRefMain*> backRefMain;
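
Read together, the declarations above outline the two descriptors behind the back-reference table. A minimal sketch of the shapes they imply, keeping only the members visible in this listing (the real structures also carry a bump pointer, free list, mutexes, and raw-memory bookkeeping):

#include <atomic>
#include <cstdint>

struct BackRefBlock {
    BackRefBlock* nextForUse;          // chain of blocks that regained free items
    std::atomic<int> allocatedCount;   // entries handed out from this block
    std::atomic<bool> addedToForUse;   // already linked into listForUse?
};

struct BackRefMain {
    std::atomic<BackRefBlock*> active;      // if defined, use it for allocations
    std::atomic<BackRefBlock*> listForUse;  // chain of blocks with free items
    std::atomic<std::intptr_t> lastUsed;    // index of the last used block
    BackRefBlock* backRefBl[1];             // real length is fixed at allocation
};

static std::atomic<BackRefMain*> backRefMain; // lazily published singleton
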
105 main->listForUse.store(nullptr, std::memory_order_relaxed); in initBackRefMain()
117 main->active.store(bl, std::memory_order_relaxed); in initBackRefMain()
120 backRefMain.store(main, std::memory_order_release); in initBackRefMain()
127 if (backRefMain.load(std::memory_order_acquire)) { // Is initBackRefMain() called? in destroyBackRefMain()
128 … for (BackRefBlock *curr = backRefMain.load(std::memory_order_relaxed)->allRawMemBlocks; curr; ) { in destroyBackRefMain()
135 backend->putBackRefSpace(backRefMain.load(std::memory_order_relaxed), BackRefMain::mainSize, in destroyBackRefMain()
136 backRefMain.load(std::memory_order_relaxed)->rawMemUsed); in destroyBackRefMain()
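
The relaxed stores in initBackRefMain() followed by the release store of backRefMain, and the acquire load that opens destroyBackRefMain(), form a standard publication protocol. A reduced sketch of that protocol, with hypothetical names (Main, g_main) standing in for BackRefMain and backRefMain:

#include <atomic>

struct Main {
    std::atomic<void*> active;
    std::atomic<void*> listForUse;
};

std::atomic<Main*> g_main{nullptr};

void publish(Main* m) {
    // Relaxed stores suffice while the object is still thread-private...
    m->active.store(nullptr, std::memory_order_relaxed);
    m->listForUse.store(nullptr, std::memory_order_relaxed);
    // ...because this release store is what makes it visible to other threads.
    g_main.store(m, std::memory_order_release);
}

bool initialized() {
    // Pairs with the release store above; once one acquire load has succeeded,
    // subsequent relaxed loads of g_main and its fields are safe.
    return g_main.load(std::memory_order_acquire) != nullptr;
}
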
143 bl->nextForUse = listForUse.load(std::memory_order_relaxed); in addToForUseList()
144 listForUse.store(bl, std::memory_order_relaxed); in addToForUseList()
145 bl->addedToForUse.store(true, std::memory_order_relaxed); in addToForUseList()
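
addToForUseList() is a plain intrusive push; the relaxed orderings are sufficient because the caller holds the main mutex. A self-contained sketch of the same three steps, with std::mutex standing in for MallocMutex:

#include <atomic>
#include <mutex>

struct Block {
    Block* nextForUse = nullptr;
    std::atomic<bool> addedToForUse{false};
};

std::mutex mainMutex;                    // stands in for MallocMutex mainMutex
std::atomic<Block*> listForUse{nullptr};

void addToForUseList(Block* bl) {        // pre: mainMutex is held by the caller
    bl->nextForUse = listForUse.load(std::memory_order_relaxed);
    listForUse.store(bl, std::memory_order_relaxed);
    bl->addedToForUse.store(true, std::memory_order_relaxed);
}
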
156 lastUsed.store(nextLU, std::memory_order_release); in initEmptyBackRefBlock()
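
The release store to lastUsed pairs with the acquire load on it in getBackRef() further down: a block index only becomes visible after the block behind it is initialized. A sketch of that handshake, with illustrative names (publishBlock, lookup) and a made-up table bound:

#include <atomic>
#include <cstdint>

constexpr std::intptr_t kMaxBlocks = 64;   // illustrative bound, not from the source
void* backRefBl[kMaxBlocks];               // simplified block table
std::atomic<std::intptr_t> lastUsed{-1};

void publishBlock(std::intptr_t nextLU, void* block) {
    backRefBl[nextLU] = block;                          // initialize first
    lastUsed.store(nextLU, std::memory_order_release);  // then publish the index
}

void* lookup(std::intptr_t idx) {          // pre: 0 <= idx < kMaxBlocks
    if (idx > lastUsed.load(std::memory_order_acquire)) // pairs with the release
        return nullptr;                                 // index not yet valid
    return backRefBl[idx];
}
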
171 if (listForUse.load(std::memory_order_relaxed)) // double check that only one block is available in requestNewSpace()
196 newBl->nextRawMemBlock = backRefMain.load(std::memory_order_relaxed)->allRawMemBlocks; in requestNewSpace()
197 backRefMain.load(std::memory_order_relaxed)->allRawMemBlocks = newBl; in requestNewSpace()
201 …if (active.load(std::memory_order_relaxed)->allocatedCount.load(std::memory_order_relaxed) == BR_M… in requestNewSpace()
202 active.store(bl, std::memory_order_release); // active leaf is not needed in listForUse in requestNewSpace()
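
requestNewSpace() runs under the main mutex and re-checks listForUse first, so only one thread actually grows the table; new raw-memory blocks are remembered on the intrusive allRawMemBlocks chain, and a fresh block replaces a completely full active block directly instead of passing through listForUse. A sketch of that flow; the helper signature and the BR_MAX_CNT value are assumptions:

#include <atomic>

struct Block {
    Block* nextRawMemBlock = nullptr;
    std::atomic<int> allocatedCount{0};
};

constexpr int BR_MAX_CNT = 255;        // illustrative capacity, value assumed

std::atomic<Block*> active{nullptr};
std::atomic<Block*> listForUse{nullptr};
Block* allRawMemBlocks = nullptr;      // intrusive chain of raw-memory blocks

void requestNewSpace(Block* fresh) {   // pre: the main mutex is held
    if (listForUse.load(std::memory_order_relaxed))
        return;  // double check: another thread already replenished the list
    fresh->nextRawMemBlock = allRawMemBlocks;  // remember the raw allocation
    allRawMemBlocks = fresh;
    Block* act = active.load(std::memory_order_relaxed);
    if (act && act->allocatedCount.load(std::memory_order_relaxed) == BR_MAX_CNT)
        active.store(fresh, std::memory_order_release); // full active block is
                                                        // not needed in listForUse
}
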
212 BackRefBlock* active_block = active.load(std::memory_order_acquire); in findFreeBlock()
215 if (active_block->allocatedCount.load(std::memory_order_relaxed) < BR_MAX_CNT) in findFreeBlock()
218 if (listForUse.load(std::memory_order_relaxed)) { // use released list in findFreeBlock()
221 if (active_block->allocatedCount.load(std::memory_order_relaxed) == BR_MAX_CNT) { in findFreeBlock()
222 active_block = listForUse.load(std::memory_order_relaxed); in findFreeBlock()
224 active.store(active_block, std::memory_order_release); in findFreeBlock()
225 listForUse.store(active_block->nextForUse, std::memory_order_relaxed); in findFreeBlock()
226 … MALLOC_ASSERT(active_block->addedToForUse.load(std::memory_order_relaxed), ASSERT_TEXT); in findFreeBlock()
227 active_block->addedToForUse.store(false, std::memory_order_relaxed); in findFreeBlock()
233 return active.load(std::memory_order_acquire); // reread because of requestNewSpace in findFreeBlock()
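
findFreeBlock() is a double-checked fast path: the active block and its count are read without the lock, and the full-block test is repeated under the main mutex before a block is moved from listForUse to active. A compilable sketch of the same shape, again with std::mutex in place of MallocMutex:

#include <atomic>
#include <mutex>

struct Block {
    Block* nextForUse = nullptr;
    std::atomic<int> allocatedCount{0};
    std::atomic<bool> addedToForUse{false};
};

constexpr int BR_MAX_CNT = 255;   // illustrative capacity
std::mutex mainMutex;
std::atomic<Block*> active{nullptr};
std::atomic<Block*> listForUse{nullptr};

Block* findFreeBlock() {
    Block* a = active.load(std::memory_order_acquire);
    if (a && a->allocatedCount.load(std::memory_order_relaxed) < BR_MAX_CNT)
        return a;                                  // fast path: no lock taken
    if (listForUse.load(std::memory_order_relaxed)) {
        std::lock_guard<std::mutex> lock(mainMutex);
        // repeat the test under the lock: another thread may have already
        // swapped in a fresh active block
        if (a && a->allocatedCount.load(std::memory_order_relaxed) == BR_MAX_CNT) {
            a = listForUse.load(std::memory_order_relaxed);
            if (a) {                               // list may have been drained
                active.store(a, std::memory_order_release);
                listForUse.store(a->nextForUse, std::memory_order_relaxed);
                a->addedToForUse.store(false, std::memory_order_relaxed);
            }
        }
    }
    return active.load(std::memory_order_acquire); // reread: may have changed
}
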
240 if (!(backRefMain.load(std::memory_order_acquire)) in getBackRef()
241 …|| backRefIdx.getMain() > (backRefMain.load(std::memory_order_relaxed)->lastUsed.load(std::memory_… in getBackRef()
246 std::atomic<void*>& backRefEntry = *(std::atomic<void*>*)( in getBackRef()
247 (uintptr_t)backRefMain.load(std::memory_order_relaxed)->backRefBl[backRefIdx.getMain()] in getBackRef()
248 + sizeof(BackRefBlock) + backRefIdx.getOffset() * sizeof(std::atomic<void*>) in getBackRef()
250 return backRefEntry.load(std::memory_order_relaxed); in getBackRef()
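
The cast in getBackRef() encodes the slab layout: each block's entry table starts immediately behind its BackRefBlock header, so entry i sits at blockBase + sizeof(BackRefBlock) + i * sizeof(std::atomic<void*>). A minimal sketch of the same address arithmetic (the stand-in header and the precondition comment are assumptions):

#include <atomic>
#include <cstdint>

struct BackRefBlock {                  // stand-in header; the real one is larger
    std::atomic<int> allocatedCount{0};
};

// pre: `block` was allocated with room for the entry table directly behind
// the header, as the layout in the listing implies
void* entryAt(BackRefBlock* block, unsigned offset) {
    auto* entry = reinterpret_cast<std::atomic<void*>*>(
        reinterpret_cast<std::uintptr_t>(block) + sizeof(BackRefBlock)
        + offset * sizeof(std::atomic<void*>));
    return entry->load(std::memory_order_relaxed);
}
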
255 …MALLOC_ASSERT(backRefIdx.getMain()<=backRefMain.load(std::memory_order_relaxed)->lastUsed.load(std… in setBackRef()
257 …((std::atomic<void*>*)((uintptr_t)backRefMain.load(std::memory_order_relaxed)->backRefBl[backRefId… in setBackRef()
258 …+ sizeof(BackRefBlock) + backRefIdx.getOffset() * sizeof(void*)))->store(newPtr, std::memory_order… in setBackRef()
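
Note that setBackRef() (line 258) scales the offset by sizeof(void*), while getBackRef() (line 248) uses sizeof(std::atomic<void*>). On mainstream platforms a lock-free std::atomic<void*> has the same size as void*, so the two computations agree; using the same spelling in both functions would avoid relying on that coincidence.
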
269 MALLOC_ASSERT(backRefMain.load(std::memory_order_relaxed), ASSERT_TEXT); in newBackRef()
270 blockToUse = backRefMain.load(std::memory_order_relaxed)->findFreeBlock(); in newBackRef()
284 } else if (blockToUse->allocatedCount.load(std::memory_order_relaxed) < BR_MAX_CNT) { in newBackRef()
288 if (blockToUse->allocatedCount.load(std::memory_order_relaxed) == BR_MAX_CNT-1) { in newBackRef()
296 if (!blockToUse->allocatedCount.load(std::memory_order_relaxed) && in newBackRef()
297 … !backRefMain.load(std::memory_order_relaxed)->listForUse.load(std::memory_order_relaxed)) { in newBackRef()
300 …oUse->allocatedCount.store(blockToUse->allocatedCount.load(std::memory_order_relaxed) + 1, std::me… in newBackRef()
307 backRefMain.load(std::memory_order_relaxed)->requestNewSpace(); in newBackRef()
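
In newBackRef(), allocatedCount is bumped with a relaxed load-plus-store rather than a fetch_add, which is safe because the per-block mutex serializes every writer; the test on lines 296-297 marks the thread that first uses the last free block while listForUse is empty, and that thread requests new space in advance. A sketch of that locked section; the name takeEntry and the capacity value are illustrative, and the free-list/bump-pointer details are elided:

#include <atomic>
#include <mutex>

struct Block {
    std::mutex blockMutex;
    std::atomic<int> allocatedCount{0};
};

constexpr int BR_MAX_CNT = 255;            // illustrative capacity
std::atomic<Block*> listForUse{nullptr};

bool takeEntry(Block* blockToUse) {
    bool lastBlockFirstUsed = false;
    {
        std::lock_guard<std::mutex> lock(blockToUse->blockMutex);
        int count = blockToUse->allocatedCount.load(std::memory_order_relaxed);
        if (count >= BR_MAX_CNT)
            return false;                  // block filled up; caller retries
        if (count == 0 && !listForUse.load(std::memory_order_relaxed))
            lastBlockFirstUsed = true;     // first user of the last free block
        // relaxed load-plus-store instead of fetch_add: blockMutex already
        // serializes every writer of allocatedCount
        blockToUse->allocatedCount.store(count + 1, std::memory_order_relaxed);
    }
    if (lastBlockFirstUsed) {
        // grow the table ahead of demand; in the source this is
        // backRefMain->requestNewSpace(), and failures are ignored
    }
    return true;
}
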
323 …MALLOC_ASSERT(backRefIdx.getMain()<=backRefMain.load(std::memory_order_relaxed)->lastUsed.load(std… in removeBackRef()
325 …BackRefBlock *currBlock = backRefMain.load(std::memory_order_relaxed)->backRefBl[backRefIdx.getMai… in removeBackRef()
326 …std::atomic<void*>& backRefEntry = *(std::atomic<void*>*)((uintptr_t)currBlock + sizeof(BackRefBlo… in removeBackRef()
327 + backRefIdx.getOffset()*sizeof(std::atomic<void*>)); in removeBackRef()
333 backRefEntry.store(currBlock->freeList, std::memory_order_relaxed); in removeBackRef()
335 uintptr_t backRefEntryValue = (uintptr_t)backRefEntry.load(std::memory_order_relaxed); in removeBackRef()
341 …rBlock->allocatedCount.store(currBlock->allocatedCount.load(std::memory_order_relaxed)-1, std::mem… in removeBackRef()
344 if (!currBlock->addedToForUse.load(std::memory_order_relaxed) && in removeBackRef()
345 … currBlock!=backRefMain.load(std::memory_order_relaxed)->active.load(std::memory_order_relaxed)) { in removeBackRef()
348 if (!currBlock->addedToForUse.load(std::memory_order_relaxed) && in removeBackRef()
349 … currBlock!=backRefMain.load(std::memory_order_relaxed)->active.load(std::memory_order_relaxed)) in removeBackRef()
350 backRefMain.load(std::memory_order_relaxed)->addToForUseList(currBlock); in removeBackRef()
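
removeBackRef() ends with a classic double-checked insertion: the unlocked test keeps the main mutex off the common path, and repeating it under the lock prevents two racing frees from linking the same block into listForUse twice. A self-contained sketch of that release path; the free-list push itself is elided:

#include <atomic>
#include <mutex>

struct Block {
    Block* nextForUse = nullptr;
    std::atomic<int> allocatedCount{0};
    std::atomic<bool> addedToForUse{false};
    std::mutex blockMutex;
};

std::mutex mainMutex;
std::atomic<Block*> active{nullptr};
std::atomic<Block*> listForUse{nullptr};

void releaseEntry(Block* currBlock) {
    {
        std::lock_guard<std::mutex> lock(currBlock->blockMutex);
        // the freed entry is pushed onto currBlock's free list here (elided)
        currBlock->allocatedCount.store(
            currBlock->allocatedCount.load(std::memory_order_relaxed) - 1,
            std::memory_order_relaxed);
    }
    // Unlocked pre-check: in the common case the block is already listed or is
    // the active block, and the main mutex is never taken.
    if (!currBlock->addedToForUse.load(std::memory_order_relaxed) &&
        currBlock != active.load(std::memory_order_relaxed)) {
        std::lock_guard<std::mutex> lock(mainMutex);
        // Repeat the test under the lock: a racing free may have inserted the
        // block already, and a double insertion would corrupt the chain.
        if (!currBlock->addedToForUse.load(std::memory_order_relaxed) &&
            currBlock != active.load(std::memory_order_relaxed)) {
            currBlock->nextForUse = listForUse.load(std::memory_order_relaxed);
            listForUse.store(currBlock, std::memory_order_relaxed);
            currBlock->addedToForUse.store(true, std::memory_order_relaxed);
        }
    }
}
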