Lines Matching refs:std

35 using ticket_type = std::size_t;
39 return reinterpret_cast<std::uintptr_t>(p) > 1; in is_valid_page()
63 using size_type = std::size_t;
83 reference operator[] (std::size_t index) {
88 const_reference operator[] (std::size_t index) const {
94 std::atomic<std::uintptr_t> mask{};
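
The atomic mask on line 94 is a per-page occupancy bitmask: push() sets bit index once the slot holds a fully constructed item (line 162), and pop() tests that bit before touching the slot (line 190). A minimal sketch of the idiom under hypothetical names; the plain load/store read-modify-write is safe in the original only because each slot is written by exactly one ticket holder:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    struct page_mask_demo {
        std::atomic<std::uintptr_t> mask{};

        // Producer side: publish "slot index is constructed" (cf. line 162).
        void mark_constructed(std::size_t index) {
            mask.store(mask.load(std::memory_order_relaxed) |
                           (std::uintptr_t(1) << index),
                       std::memory_order_relaxed);
        }

        // Consumer side: was this slot ever constructed? (cf. line 190)
        bool is_constructed(std::size_t index) const {
            return (mask.load(std::memory_order_relaxed) &
                    (std::uintptr_t(1) << index)) != 0;
        }
    };
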
131 padded_page* q = tail_page.load(std::memory_order_relaxed); in prepare_page()
135 head_page.store(p, std::memory_order_relaxed); in prepare_page()
137 tail_page.store(p, std::memory_order_relaxed); in prepare_page()
139 p = tail_page.load(std::memory_order_relaxed); in prepare_page()
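
prepare_page (lines 131-139) guarantees a tail page exists before an element is constructed: if the queue has no pages yet, the fresh page becomes both head and tail; otherwise it is chained after the current tail. A sketch of just the linking step, with hypothetical demo types and allocation elided; relaxed ordering suffices in the original because the caller already owns its turn:

    #include <atomic>

    struct page_demo { page_demo* next = nullptr; };

    struct micro_queue_demo {
        std::atomic<page_demo*> head_page{};
        std::atomic<page_demo*> tail_page{};

        // Append a freshly allocated page p (cf. prepare_page above).
        void append_page(page_demo* p) {
            page_demo* q = tail_page.load(std::memory_order_relaxed);
            if (q == nullptr) {
                head_page.store(p, std::memory_order_relaxed); // first page ever
            } else {
                q->next = p;                                   // chain after old tail
            }
            tail_page.store(p, std::memory_order_relaxed);
        }
    };
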
160 page_allocator_traits::construct(page_allocator, &(*p)[index], std::forward<Args>(args)...); in push()
162 p->mask.store(p->mask.load(std::memory_order_relaxed) | uintptr_t(1) << index, std::memory_order_relaxed); in push()
182 padded_page *p = head_page.load(std::memory_order_relaxed); in pop()
190 if (p->mask.load(std::memory_order_relaxed) & (std::uintptr_t(1) << index)) { in pop()
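
push() and pop() both turn a ticket into an in-page slot with the same arithmetic: tickets are striped across queue_rep_type::n_queue micro-queues, so dividing by n_queue gives the position within one micro-queue, and a power-of-two modulo by items_per_page gives the slot (the pattern visible on lines 551 and 573 below). A sketch with assumed constants; the real items_per_page depends on the element size, and modulo_power_of_two is assumed to be the usual mask trick:

    #include <cstddef>

    // Assumed implementation: valid only when d is a power of two.
    constexpr std::size_t modulo_power_of_two(std::size_t v, std::size_t d) {
        return v & (d - 1);
    }

    constexpr std::size_t n_queue = 8;         // assumed stripe count
    constexpr std::size_t items_per_page = 32; // assumed page capacity

    // Slot of ticket k within its page.
    constexpr std::size_t slot_of(std::size_t k) {
        return modulo_power_of_two(k / n_queue, items_per_page);
    }

    static_assert(slot_of(0) == 0, "first ticket of a stripe: slot 0");
    static_assert(slot_of(8) == 1, "next ticket of the same stripe: slot 1");
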
203 head_counter.store(src.head_counter.load(std::memory_order_relaxed), std::memory_order_relaxed); in assign()
204 tail_counter.store(src.tail_counter.load(std::memory_order_relaxed), std::memory_order_relaxed); in assign()
206 const padded_page* srcp = src.head_page.load(std::memory_order_relaxed); in assign()
208 ticket_type g_index = head_counter.load(std::memory_order_relaxed); in assign()
209 size_type n_items = (tail_counter.load(std::memory_order_relaxed) - head_counter.load(std::memory_order_relaxed)) / queue_rep_type::n_queue; in assign()
211 size_type index = modulo_power_of_two(head_counter.load(std::memory_order_relaxed) / queue_rep_type::n_queue, items_per_page); in assign()
215 head_page.store(make_copy(allocator, srcp, index, end_in_first_page, g_index, construct_item), std::memory_order_relaxed); in assign()
217 head_counter.store(0, std::memory_order_relaxed); in assign()
218 tail_counter.store(0, std::memory_order_relaxed); in assign()
220 padded_page* cur_page = head_page.load(std::memory_order_relaxed); in assign()
223 if (srcp != src.tail_page.load(std::memory_order_relaxed)) { in assign()
224 for (srcp = srcp->next; srcp != src.tail_page.load(std::memory_order_relaxed); srcp=srcp->next ) { in assign()
229 __TBB_ASSERT(srcp == src.tail_page.load(std::memory_order_relaxed), nullptr ); in assign()
230 size_type last_index = modulo_power_of_two(tail_counter.load(std::memory_order_relaxed) / queue_rep_type::n_queue, items_per_page); in assign()
236 tail_page.store(cur_page, std::memory_order_relaxed); in assign()
238 padded_page* invalid_page = reinterpret_cast<padded_page*>(std::uintptr_t(1)); in assign()
239 tail_page.store(invalid_page, std::memory_order_relaxed); in assign()
242 head_page.store(nullptr, std::memory_order_relaxed); in assign()
243 tail_page.store(nullptr, std::memory_order_relaxed); in assign()
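
Taken together, lines 203-243 deep-copy one micro-queue: copy both counters, copy the partially filled head page starting at the first live slot, walk the source's page list copying full pages, and finish with the partially filled tail page (falling back to the invalid-page sentinel if a copy throws). The skeleton of such a walk, reduced to a plain singly linked list with copy details and exception handling elided:

    struct node_demo {
        int value;
        node_demo* next = nullptr;
    };

    // Deep-copy a singly linked list, returning the new head
    // (cf. the srcp/cur_page walk on lines 220-236).
    node_demo* deep_copy(const node_demo* src) {
        if (src == nullptr) return nullptr;
        node_demo* head = new node_demo{src->value};
        node_demo* cur = head;
        for (src = src->next; src != nullptr; src = src->next) {
            cur->next = new node_demo{src->value};
            cur = cur->next;
        }
        return head;
    }
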
254 new_page->mask.store(src_page->mask.load(std::memory_order_relaxed), std::memory_order_relaxed); in make_copy()
256 if (new_page->mask.load(std::memory_order_relaxed) & uintptr_t(1) << begin_in_page) { in make_copy()
265 padded_page* invalid_page = reinterpret_cast<padded_page*>(std::uintptr_t(1)); in invalidate_page()
268 tail_counter.store(k + queue_rep_type::n_queue + 1, std::memory_order_relaxed); in invalidate_page()
269 padded_page* q = tail_page.load(std::memory_order_relaxed); in invalidate_page()
273 head_page.store(invalid_page, std::memory_order_relaxed); in invalidate_page()
275 tail_page.store(invalid_page, std::memory_order_relaxed); in invalidate_page()
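
The invalid_page built on lines 238, 265 and 309 is a sentinel pointer with integer value 1: it differs from nullptr yet still fails is_valid_page() (line 39), which accepts only addresses greater than 1. Exception and invalidation paths can therefore mark a queue broken without allocating anything. The idiom in isolation:

    #include <cstdint>

    struct page; // stands in for padded_page

    // Sentinel distinct from nullptr and from any real allocation.
    inline page* make_invalid_page() {
        return reinterpret_cast<page*>(std::uintptr_t(1));
    }

    // Mirrors is_valid_page() on line 39: real pages live above address 1,
    // so both nullptr (0) and the sentinel (1) are rejected.
    inline bool is_valid_page(const page* p) {
        return reinterpret_cast<std::uintptr_t>(p) > 1;
    }
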
280 return head_page.load(std::memory_order_relaxed); in get_head_page()
285 size_type index = (head_counter.load(std::memory_order_relaxed) / queue_rep_type::n_queue) % items_per_page;
290 … if (curr_page->mask.load(std::memory_order_relaxed) & (std::uintptr_t(1) << index)) {
302 head_counter.store(0, std::memory_order_relaxed);
303 tail_counter.store(0, std::memory_order_relaxed);
304 head_page.store(new_head, std::memory_order_relaxed);
305 tail_page.store(new_tail, std::memory_order_relaxed);
309 padded_page* invalid_page = reinterpret_cast<padded_page*>(std::uintptr_t(1)); in clear_and_invalidate()
337 *static_cast<T*>(dst) = std::move(from); in assign_and_destroy_item()
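
assign_and_destroy_item (line 337) is the consumer-side hand-off: move the value out to the caller's destination, then end the slot's lifetime so the page can be recycled. The same two steps via allocator traits; alloc is assumed to be an allocator for T:

    #include <memory>
    #include <utility>

    template <typename T, typename Allocator>
    void assign_and_destroy_demo(Allocator& alloc, void* dst, T& from) {
        *static_cast<T*>(dst) = std::move(from);                 // cf. line 337
        std::allocator_traits<Allocator>::destroy(alloc, &from); // slot is now dead
    }
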
340 void spin_wait_until_my_turn( std::atomic<ticket_type>& counter, ticket_type k, queue_rep_type& rb ) const { in spin_wait_until_my_turn()
342 ticket_type c = counter.load(std::memory_order_acquire); in spin_wait_until_my_turn()
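
spin_wait_until_my_turn (lines 340-342) is the ordering backbone: every push and pop takes a ticket k and spins until the matching counter reaches it. The acquire load on line 342 pairs with the release store a finishing operation uses to advance the counter (line 388 below), so everything the previous ticket holder wrote is visible before the next one proceeds. Stripped of TBB's backoff machinery:

    #include <atomic>
    #include <cstddef>

    using ticket_type = std::size_t;

    // Busy-wait until it is ticket k's turn; the acquire load
    // synchronizes-with the previous owner's release store.
    inline void spin_wait_until_eq(const std::atomic<ticket_type>& counter,
                                   ticket_type k) {
        while (counter.load(std::memory_order_acquire) != k) {
            // the real code yields / backs off here
        }
    }
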
351 std::atomic<padded_page*> head_page{};
352 std::atomic<ticket_type> head_counter{};
354 std::atomic<padded_page*> tail_page{};
355 std::atomic<ticket_type> tail_counter{};
383 my_queue.head_page.store(q, std::memory_order_relaxed); in ~micro_queue_pop_finalizer()
385 my_queue.tail_page.store(nullptr, std::memory_order_relaxed); in ~micro_queue_pop_finalizer()
388 my_queue.head_counter.store(my_ticket_type, std::memory_order_release); in ~micro_queue_pop_finalizer()
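
micro_queue_pop_finalizer is a RAII guard around pop(): its destructor (lines 383-388) unlinks a fully consumed page and only then advances head_counter with a release store, the publication side of the acquire spin above. Doing this in a destructor surrenders the ticket even if the element's move assignment throws. The shape of the guard, under hypothetical names:

    #include <atomic>
    #include <cstddef>

    using ticket_type = std::size_t;

    class pop_finalizer_demo {
    public:
        pop_finalizer_demo(std::atomic<ticket_type>& c, ticket_type next)
            : counter(c), next_ticket(next) {}

        // Runs on normal and exception paths alike; the release store
        // publishes this consumer's work to the next ticket holder.
        ~pop_finalizer_demo() {
            counter.store(next_ticket, std::memory_order_release);
        }

    private:
        std::atomic<ticket_type>& counter;
        ticket_type next_ticket;
    };
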
410 using size_type = std::size_t;
438 head_counter.store(0, std::memory_order_relaxed); in clear()
439 tail_counter.store(0, std::memory_order_relaxed); in clear()
440 n_invalid_entries.store(0, std::memory_order_relaxed); in clear()
444 head_counter.store(src.head_counter.load(std::memory_order_relaxed), std::memory_order_relaxed); in assign()
445 tail_counter.store(src.tail_counter.load(std::memory_order_relaxed), std::memory_order_relaxed); in assign()
446 n_invalid_entries.store(src.n_invalid_entries.load(std::memory_order_relaxed), std::memory_order_relaxed); in assign()
458 head_counter.store(0, std::memory_order_relaxed); in assign()
459 tail_counter.store(0, std::memory_order_relaxed); in assign()
460 n_invalid_entries.store(0, std::memory_order_relaxed); in assign()
463 __TBB_ASSERT(head_counter.load(std::memory_order_relaxed) == src.head_counter.load(std::memory_order_relaxed) && in assign()
464 tail_counter.load(std::memory_order_relaxed) == src.tail_counter.load(std::memory_order_relaxed), in assign()
469 ticket_type tc = tail_counter.load(std::memory_order_acquire); in empty()
470 ticket_type hc = head_counter.load(std::memory_order_relaxed); in empty()
472 return tc == tail_counter.load(std::memory_order_relaxed) && in empty()
473 std::ptrdiff_t(tc - hc - n_invalid_entries.load(std::memory_order_relaxed)) <= 0; in empty()
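
empty() (lines 469-473) takes a lock-free snapshot: load tail with acquire, load head and the invalid-entry count, then re-read tail and trust the comparison only if it has not moved. If tail did advance, something was pushed in between, so answering "not empty" is already correct. The same test in isolation:

    #include <atomic>
    #include <cstddef>

    using ticket_type = std::size_t;

    bool empty_demo(const std::atomic<ticket_type>& head_counter,
                    const std::atomic<ticket_type>& tail_counter,
                    const std::atomic<std::size_t>& n_invalid_entries) {
        ticket_type tc = tail_counter.load(std::memory_order_acquire);
        ticket_type hc = head_counter.load(std::memory_order_relaxed);
        // Re-read tail: accept the snapshot only if it is unchanged.
        return tc == tail_counter.load(std::memory_order_relaxed) &&
               std::ptrdiff_t(tc - hc -
                   n_invalid_entries.load(std::memory_order_relaxed)) <= 0;
    }
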
476 std::ptrdiff_t size() const { in size()
477 __TBB_ASSERT(sizeof(std::ptrdiff_t) <= sizeof(size_type), nullptr); in size()
478 std::ptrdiff_t hc = head_counter.load(std::memory_order_acquire); in size()
479 std::ptrdiff_t tc = tail_counter.load(std::memory_order_relaxed); in size()
480 std::ptrdiff_t nie = n_invalid_entries.load(std::memory_order_relaxed); in size()
499 alignas(max_nfs_size) std::atomic<ticket_type> head_counter{};
500 alignas(max_nfs_size) std::atomic<ticket_type> tail_counter{};
501 alignas(max_nfs_size) std::atomic<size_type> n_invalid_entries{};
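
The alignas(max_nfs_size) on lines 499-501 gives each counter its own cache line (nfs: "no false sharing"), so producers hammering tail_counter do not invalidate the line that consumers read head_counter from. A portable rendering of the same layout, using the standard interference-size constant where the implementation provides it:

    #include <atomic>
    #include <cstddef>
    #include <new>

    #ifdef __cpp_lib_hardware_interference_size
    inline constexpr std::size_t line_size = std::hardware_destructive_interference_size;
    #else
    inline constexpr std::size_t line_size = 64; // common fallback
    #endif

    struct queue_rep_demo {
        alignas(line_size) std::atomic<std::size_t> head_counter{};
        alignas(line_size) std::atomic<std::size_t> tail_counter{};
        alignas(line_size) std::atomic<std::size_t> n_invalid_entries{};
    };
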
521 my_head_counter(my_queue_rep->head_counter.load(std::memory_order_relaxed)) in concurrent_queue_iterator_base()
523 for (std::size_t i = 0; i < queue_rep_type::n_queue; ++i) { in concurrent_queue_iterator_base()
537 for (std::size_t i = 0; i < queue_rep_type::n_queue; ++i) { in assign()
545 std::size_t k = my_head_counter; in advance()
551 std::size_t i = modulo_power_of_two(k / queue_rep_type::n_queue, my_queue_rep->items_per_page); in advance()
566 bool get_item( Value*& item, std::size_t k ) { in get_item()
567 if (k == my_queue_rep->tail_counter.load(std::memory_order_relaxed)) { in get_item()
573 std::size_t i = modulo_power_of_two(k / queue_rep_type::n_queue, my_queue_rep->items_per_page); in get_item()
593 class concurrent_queue_iterator : public concurrent_queue_iterator_base<typename std::remove_cv<Value>::type, Allocator> {
594 using base_type = concurrent_queue_iterator_base<typename std::remove_cv<Value>::type, Allocator>;
599 using difference_type = std::ptrdiff_t;
600 using iterator_category = std::forward_iterator_tag;
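
The concurrent_queue_iterator assembled here (lines 593-600) is the forward iterator that tbb::concurrent_queue exposes through unsafe_begin()/unsafe_end(); as the names warn, it is for debugging and must not run while other threads push or pop. Typical single-threaded traversal, assuming the oneTBB headers are available:

    #include <oneapi/tbb/concurrent_queue.h>
    #include <iostream>

    int main() {
        oneapi::tbb::concurrent_queue<int> q;
        for (int i = 0; i < 4; ++i) q.push(i);

        // Debugging-only traversal: invalid under concurrent modification.
        for (auto it = q.unsafe_begin(); it != q.unsafe_end(); ++it)
            std::cout << *it << ' ';
        std::cout << '\n';
    }
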