xref: /llvm-project-15.0.7/libcxx/include/atomic (revision cf6a7c19)
1// -*- C++ -*-
2//===----------------------------------------------------------------------===//
3//
4// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
5// See https://llvm.org/LICENSE.txt for license information.
6// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7//
8//===----------------------------------------------------------------------===//
9
10#ifndef _LIBCPP_ATOMIC
11#define _LIBCPP_ATOMIC
12
13/*
14    atomic synopsis
15
16namespace std
17{
18
19// feature test macro [version.syn]
20
21#define __cpp_lib_atomic_is_always_lock_free
22#define __cpp_lib_atomic_flag_test
23#define __cpp_lib_atomic_lock_free_type_aliases
24#define __cpp_lib_atomic_wait
25
26 // order and consistency
27
28 enum memory_order: unspecified // enum class in C++20
29 {
30    relaxed,
31    consume, // load-consume
32    acquire, // load-acquire
33    release, // store-release
34    acq_rel, // store-release load-acquire
    seq_cst // store-release load-acquire, plus a single total order over all seq_cst operations
36 };
37
38 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
39 inline constexpr auto memory_order_consume = memory_order::consume;
40 inline constexpr auto memory_order_acquire = memory_order::acquire;
41 inline constexpr auto memory_order_release = memory_order::release;
42 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
43 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
44
45template <class T> T kill_dependency(T y) noexcept;
46
47// lock-free property
48
49#define ATOMIC_BOOL_LOCK_FREE unspecified
50#define ATOMIC_CHAR_LOCK_FREE unspecified
51#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
52#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
53#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
54#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
55#define ATOMIC_SHORT_LOCK_FREE unspecified
56#define ATOMIC_INT_LOCK_FREE unspecified
57#define ATOMIC_LONG_LOCK_FREE unspecified
58#define ATOMIC_LLONG_LOCK_FREE unspecified
59#define ATOMIC_POINTER_LOCK_FREE unspecified
60
61template <class T>
62struct atomic
63{
64    using value_type = T;
65
66    static constexpr bool is_always_lock_free;
67    bool is_lock_free() const volatile noexcept;
68    bool is_lock_free() const noexcept;
69
70    atomic() noexcept = default; // until C++20
71    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
72    constexpr atomic(T desr) noexcept;
73    atomic(const atomic&) = delete;
74    atomic& operator=(const atomic&) = delete;
75    atomic& operator=(const atomic&) volatile = delete;
76
77    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
78    T load(memory_order m = memory_order_seq_cst) const noexcept;
79    operator T() const volatile noexcept;
80    operator T() const noexcept;
81    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
82    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
83    T operator=(T) volatile noexcept;
84    T operator=(T) noexcept;
85
86    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
87    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
88    bool compare_exchange_weak(T& expc, T desr,
89                               memory_order s, memory_order f) volatile noexcept;
90    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
91    bool compare_exchange_strong(T& expc, T desr,
92                                 memory_order s, memory_order f) volatile noexcept;
93    bool compare_exchange_strong(T& expc, T desr,
94                                 memory_order s, memory_order f) noexcept;
95    bool compare_exchange_weak(T& expc, T desr,
96                               memory_order m = memory_order_seq_cst) volatile noexcept;
97    bool compare_exchange_weak(T& expc, T desr,
98                               memory_order m = memory_order_seq_cst) noexcept;
99    bool compare_exchange_strong(T& expc, T desr,
100                                memory_order m = memory_order_seq_cst) volatile noexcept;
101    bool compare_exchange_strong(T& expc, T desr,
102                                 memory_order m = memory_order_seq_cst) noexcept;
103
104    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
105    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
106    void notify_one() volatile noexcept;
107    void notify_one() noexcept;
108    void notify_all() volatile noexcept;
109    void notify_all() noexcept;
110};
111
112template <>
113struct atomic<integral>
114{
115    using value_type = integral;
116    using difference_type = value_type;
117
118    static constexpr bool is_always_lock_free;
119    bool is_lock_free() const volatile noexcept;
120    bool is_lock_free() const noexcept;
121
122    atomic() noexcept = default;
123    constexpr atomic(integral desr) noexcept;
124    atomic(const atomic&) = delete;
125    atomic& operator=(const atomic&) = delete;
126    atomic& operator=(const atomic&) volatile = delete;
127
128    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
129    integral load(memory_order m = memory_order_seq_cst) const noexcept;
130    operator integral() const volatile noexcept;
131    operator integral() const noexcept;
132    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
133    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
134    integral operator=(integral desr) volatile noexcept;
135    integral operator=(integral desr) noexcept;
136
137    integral exchange(integral desr,
138                      memory_order m = memory_order_seq_cst) volatile noexcept;
139    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    bool compare_exchange_weak(integral& expc, integral desr,
141                               memory_order s, memory_order f) volatile noexcept;
142    bool compare_exchange_weak(integral& expc, integral desr,
143                               memory_order s, memory_order f) noexcept;
144    bool compare_exchange_strong(integral& expc, integral desr,
145                                 memory_order s, memory_order f) volatile noexcept;
146    bool compare_exchange_strong(integral& expc, integral desr,
147                                 memory_order s, memory_order f) noexcept;
148    bool compare_exchange_weak(integral& expc, integral desr,
149                               memory_order m = memory_order_seq_cst) volatile noexcept;
150    bool compare_exchange_weak(integral& expc, integral desr,
151                               memory_order m = memory_order_seq_cst) noexcept;
152    bool compare_exchange_strong(integral& expc, integral desr,
153                                memory_order m = memory_order_seq_cst) volatile noexcept;
154    bool compare_exchange_strong(integral& expc, integral desr,
155                                 memory_order m = memory_order_seq_cst) noexcept;
156
157    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
158    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
159    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
160    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
161    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
162    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
163    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
164    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
165    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
167
168    integral operator++(int) volatile noexcept;
169    integral operator++(int) noexcept;
170    integral operator--(int) volatile noexcept;
171    integral operator--(int) noexcept;
172    integral operator++() volatile noexcept;
173    integral operator++() noexcept;
174    integral operator--() volatile noexcept;
175    integral operator--() noexcept;
176    integral operator+=(integral op) volatile noexcept;
177    integral operator+=(integral op) noexcept;
178    integral operator-=(integral op) volatile noexcept;
179    integral operator-=(integral op) noexcept;
180    integral operator&=(integral op) volatile noexcept;
181    integral operator&=(integral op) noexcept;
182    integral operator|=(integral op) volatile noexcept;
183    integral operator|=(integral op) noexcept;
184    integral operator^=(integral op) volatile noexcept;
185    integral operator^=(integral op) noexcept;
186
187    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
188    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
189    void notify_one() volatile noexcept;
190    void notify_one() noexcept;
191    void notify_all() volatile noexcept;
192    void notify_all() noexcept;
193};
194
195template <class T>
196struct atomic<T*>
197{
198    using value_type = T*;
199    using difference_type = ptrdiff_t;
200
201    static constexpr bool is_always_lock_free;
202    bool is_lock_free() const volatile noexcept;
203    bool is_lock_free() const noexcept;
204
205    atomic() noexcept = default; // until C++20
206    constexpr atomic() noexcept; // since C++20
207    constexpr atomic(T* desr) noexcept;
208    atomic(const atomic&) = delete;
209    atomic& operator=(const atomic&) = delete;
210    atomic& operator=(const atomic&) volatile = delete;
211
212    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
213    T* load(memory_order m = memory_order_seq_cst) const noexcept;
214    operator T*() const volatile noexcept;
215    operator T*() const noexcept;
216    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
217    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
218    T* operator=(T*) volatile noexcept;
219    T* operator=(T*) noexcept;
220
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
222    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
223    bool compare_exchange_weak(T*& expc, T* desr,
224                               memory_order s, memory_order f) volatile noexcept;
225    bool compare_exchange_weak(T*& expc, T* desr,
226                               memory_order s, memory_order f) noexcept;
227    bool compare_exchange_strong(T*& expc, T* desr,
228                                 memory_order s, memory_order f) volatile noexcept;
229    bool compare_exchange_strong(T*& expc, T* desr,
230                                 memory_order s, memory_order f) noexcept;
231    bool compare_exchange_weak(T*& expc, T* desr,
232                               memory_order m = memory_order_seq_cst) volatile noexcept;
233    bool compare_exchange_weak(T*& expc, T* desr,
234                               memory_order m = memory_order_seq_cst) noexcept;
235    bool compare_exchange_strong(T*& expc, T* desr,
236                                memory_order m = memory_order_seq_cst) volatile noexcept;
237    bool compare_exchange_strong(T*& expc, T* desr,
238                                 memory_order m = memory_order_seq_cst) noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
240    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
242    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
243
244    T* operator++(int) volatile noexcept;
245    T* operator++(int) noexcept;
246    T* operator--(int) volatile noexcept;
247    T* operator--(int) noexcept;
248    T* operator++() volatile noexcept;
249    T* operator++() noexcept;
250    T* operator--() volatile noexcept;
251    T* operator--() noexcept;
252    T* operator+=(ptrdiff_t op) volatile noexcept;
253    T* operator+=(ptrdiff_t op) noexcept;
254    T* operator-=(ptrdiff_t op) volatile noexcept;
255    T* operator-=(ptrdiff_t op) noexcept;
256
257    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
258    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
259    void notify_one() volatile noexcept;
260    void notify_one() noexcept;
261    void notify_all() volatile noexcept;
262    void notify_all() noexcept;
263};
264
265
266// [atomics.nonmembers], non-member functions
267template<class T>
268  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
269template<class T>
270  bool atomic_is_lock_free(const atomic<T>*) noexcept;
271template<class T>
272  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
273template<class T>
274  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
275template<class T>
276  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
277                             memory_order) noexcept;
278template<class T>
279  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
280                             memory_order) noexcept;
281template<class T>
282  T atomic_load(const volatile atomic<T>*) noexcept;
283template<class T>
284  T atomic_load(const atomic<T>*) noexcept;
285template<class T>
286  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
287template<class T>
288  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
289template<class T>
290  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
291template<class T>
292  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
293template<class T>
294  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
295                             memory_order) noexcept;
296template<class T>
297  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
298                             memory_order) noexcept;
299template<class T>
300  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
301                                    atomic<T>::value_type) noexcept;
302template<class T>
303  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
304                                    atomic<T>::value_type) noexcept;
305template<class T>
306  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
307                                      atomic<T>::value_type) noexcept;
308template<class T>
309  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
310                                      atomic<T>::value_type) noexcept;
311template<class T>
312  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
313                                             atomic<T>::value_type,
314                                             memory_order, memory_order) noexcept;
315template<class T>
316  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
317                                             atomic<T>::value_type,
318                                             memory_order, memory_order) noexcept;
319template<class T>
320  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
321                                               atomic<T>::value_type,
322                                               memory_order, memory_order) noexcept;
323template<class T>
324  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
325                                               atomic<T>::value_type,
326                                               memory_order, memory_order) noexcept;
327
328template<class T>
329  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
330template<class T>
331  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
332template<class T>
333  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
334                              memory_order) noexcept;
335template<class T>
336  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
337                              memory_order) noexcept;
338template<class T>
339  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
340template<class T>
341  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
342template<class T>
343  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
344                              memory_order) noexcept;
345template<class T>
346  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
347                              memory_order) noexcept;
348template<class T>
349  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
350template<class T>
351  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
352template<class T>
353  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
354                              memory_order) noexcept;
355template<class T>
356  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
357                              memory_order) noexcept;
358template<class T>
359  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
360template<class T>
361  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
362template<class T>
363  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
364                             memory_order) noexcept;
365template<class T>
366  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
367                             memory_order) noexcept;
368template<class T>
369  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
370template<class T>
371  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
372template<class T>
373  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
374                              memory_order) noexcept;
375template<class T>
376  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
377                              memory_order) noexcept;
378
379template<class T>
380  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
381template<class T>
382  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
383template<class T>
384  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
385                            memory_order);
386template<class T>
387  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
388                            memory_order);
389template<class T>
390  void atomic_notify_one(volatile atomic<T>*);
391template<class T>
392  void atomic_notify_one(atomic<T>*);
393template<class T>
394  void atomic_notify_all(volatile atomic<T>*);
395template<class T>
396  void atomic_notify_all(atomic<T>*);
397
398// Atomics for standard typedef types
399
400typedef atomic<bool>               atomic_bool;
401typedef atomic<char>               atomic_char;
402typedef atomic<signed char>        atomic_schar;
403typedef atomic<unsigned char>      atomic_uchar;
404typedef atomic<short>              atomic_short;
405typedef atomic<unsigned short>     atomic_ushort;
406typedef atomic<int>                atomic_int;
407typedef atomic<unsigned int>       atomic_uint;
408typedef atomic<long>               atomic_long;
409typedef atomic<unsigned long>      atomic_ulong;
410typedef atomic<long long>          atomic_llong;
411typedef atomic<unsigned long long> atomic_ullong;
412typedef atomic<char8_t>            atomic_char8_t; // C++20
413typedef atomic<char16_t>           atomic_char16_t;
414typedef atomic<char32_t>           atomic_char32_t;
415typedef atomic<wchar_t>            atomic_wchar_t;
416
417typedef atomic<int_least8_t>   atomic_int_least8_t;
418typedef atomic<uint_least8_t>  atomic_uint_least8_t;
419typedef atomic<int_least16_t>  atomic_int_least16_t;
420typedef atomic<uint_least16_t> atomic_uint_least16_t;
421typedef atomic<int_least32_t>  atomic_int_least32_t;
422typedef atomic<uint_least32_t> atomic_uint_least32_t;
423typedef atomic<int_least64_t>  atomic_int_least64_t;
424typedef atomic<uint_least64_t> atomic_uint_least64_t;
425
426typedef atomic<int_fast8_t>   atomic_int_fast8_t;
427typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
428typedef atomic<int_fast16_t>  atomic_int_fast16_t;
429typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
430typedef atomic<int_fast32_t>  atomic_int_fast32_t;
431typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
432typedef atomic<int_fast64_t>  atomic_int_fast64_t;
433typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
434
435typedef atomic<int8_t>   atomic_int8_t;
436typedef atomic<uint8_t>  atomic_uint8_t;
437typedef atomic<int16_t>  atomic_int16_t;
438typedef atomic<uint16_t> atomic_uint16_t;
439typedef atomic<int32_t>  atomic_int32_t;
440typedef atomic<uint32_t> atomic_uint32_t;
441typedef atomic<int64_t>  atomic_int64_t;
442typedef atomic<uint64_t> atomic_uint64_t;
443
444typedef atomic<intptr_t>  atomic_intptr_t;
445typedef atomic<uintptr_t> atomic_uintptr_t;
446typedef atomic<size_t>    atomic_size_t;
447typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
448typedef atomic<intmax_t>  atomic_intmax_t;
449typedef atomic<uintmax_t> atomic_uintmax_t;
450
451// flag type and operations
452
453typedef struct atomic_flag
454{
455    atomic_flag() noexcept = default; // until C++20
456    constexpr atomic_flag() noexcept; // since C++20
457    atomic_flag(const atomic_flag&) = delete;
458    atomic_flag& operator=(const atomic_flag&) = delete;
459    atomic_flag& operator=(const atomic_flag&) volatile = delete;
460
461    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
462    bool test(memory_order m = memory_order_seq_cst) noexcept;
463    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
464    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
465    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
466    void clear(memory_order m = memory_order_seq_cst) noexcept;
467
468    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
469    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
470    void notify_one() volatile noexcept;
471    void notify_one() noexcept;
472    void notify_all() volatile noexcept;
473    void notify_all() noexcept;
474} atomic_flag;
475
476bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
477bool atomic_flag_test(atomic_flag* obj) noexcept;
478bool atomic_flag_test_explicit(volatile atomic_flag* obj,
479                               memory_order m) noexcept;
480bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
481bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
482bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
483bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
484                                       memory_order m) noexcept;
485bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
486void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
487void atomic_flag_clear(atomic_flag* obj) noexcept;
488void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
489void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
490
void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old,
                               memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old,
                               memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;
499
500// fences
501
502void atomic_thread_fence(memory_order m) noexcept;
503void atomic_signal_fence(memory_order m) noexcept;
504
505// deprecated
506
507template <class T>
508  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;
509
510template <class T>
511  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;
512
513#define ATOMIC_VAR_INIT(value) see below
514
515#define ATOMIC_FLAG_INIT see below
516
517}  // std
518
519*/
520
521#include <__assert> // all public C++ headers provide the assertion handler
522#include <__availability>
523#include <__chrono/duration.h>
524#include <__config>
525#include <__thread/poll_with_backoff.h>
526#include <__thread/timed_backoff_policy.h>
527#include <cstddef>
528#include <cstdint>
529#include <cstring>
530#include <type_traits>
531#include <version>
532
533#ifndef _LIBCPP_HAS_NO_THREADS
534# include <__threading_support>
535#endif
536
537#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
538#  pragma GCC system_header
539#endif
540
541#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
542# error <atomic> is not implemented
543#endif
544#ifdef kill_dependency
545# error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
546#endif
547
// Compile-time diagnostics (via _LIBCPP_DIAGNOSE_WARNING) for memory_order
// arguments that are invalid for the operation they are passed to.

// A store may not use a load-only ordering: consume, acquire or acq_rel.
#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
                           __m == memory_order_acquire || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

// A load may not use a store-only ordering: release or acq_rel.
#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
                           __m == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")

// For compare-exchange only the failure ordering __f is constrained: a failed
// compare-exchange is a load, so __f may not be release or acq_rel. The
// success ordering __m is unrestricted and deliberately unused here.
#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
                           __f == memory_order_acq_rel,   \
                        "memory order argument to atomic operation is invalid")
563
564_LIBCPP_BEGIN_NAMESPACE_STD
565
// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20, so the scoped (C++20) and unscoped
// (pre-C++20) definitions of std::memory_order share one representation.
enum __legacy_memory_order {
    __mo_relaxed,
    __mo_consume,
    __mo_acquire,
    __mo_release,
    __mo_acq_rel,
    __mo_seq_cst
};

// The underlying type the compiler chose for the unscoped enum above.
typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
579
#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enumeration. Its underlying type is pinned
// to __memory_order_underlying_t (computed above) so the representation is
// identical to the pre-C++20 unscoped enum below.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

// The traditional memory_order_* names remain usable as constants of the
// scoped enumeration type.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

// Pre-C++20: memory_order is an unscoped enumeration and the memory_order_*
// names are its enumerators, with the same values as the C++20 form.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17
610
611template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
612bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
613    return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
614}
615
// Sanity check: whichever dialect-specific definition of memory_order was
// selected above must use the underlying type computed from the legacy enum.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
  "unexpected underlying type for std::memory_order");
618
619#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
620    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
621
// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.

// Non-volatile case: enabled only when _Tv is assignable to _Tp&, in which
// case a plain assignment suffices.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
630template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
631typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
632__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
633  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
634  volatile char* __end = __to + sizeof(_Tp);
635  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
636  while (__to != __end)
637    *__to++ = *__from++;
638}
639
640#endif
641
642#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
643
// Storage for an atomic object when the GCC __atomic_* builtins are used:
// a plain _Tp that the builtins access by address (&__a_value).
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    // C++03 has no defaulted functions; value-initialize explicitly instead.
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
657
// Map a std::memory_order onto the corresponding GCC/Clang __ATOMIC_* constant.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}
667
// Like __to_gcc_order, but for the *failure* ordering of a compare-exchange.
// A failed compare-exchange performs only a load, so release is demoted to
// relaxed and acq_rel to acquire (release semantics cannot apply).
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}
677
// Non-atomic initialization of a volatile atomic object: a plain store with
// no ordering, routed through the byte-wise volatile assignment helper
// because the default operator= is not volatile-qualified.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
683
// Non-atomic initialization, non-volatile overload: a direct assignment with
// no ordering implied.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
689
// Inter-thread fence: forwards to the __atomic_thread_fence builtin with the
// translated ordering.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}
694
// Fence between a thread and a signal handler executed in the same thread:
// forwards to the __atomic_signal_fence builtin (compiler barrier only).
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}
699
// Atomic store via the generic (pointer-based) __atomic_store builtin,
// which works for any trivially copyable _Tp regardless of size.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a,  _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}
715
716template <typename _Tp>
717_LIBCPP_INLINE_VISIBILITY
718_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
719                      memory_order __order) {
720  _Tp __ret;
721  __atomic_load(&__a->__a_value, &__ret,
722                __to_gcc_order(__order));
723  return __ret;
724}
725
726template <typename _Tp>
727_LIBCPP_INLINE_VISIBILITY
728_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
729  _Tp __ret;
730  __atomic_load(&__a->__a_value, &__ret,
731                __to_gcc_order(__order));
732  return __ret;
733}
734
// Atomic exchange via the generic __atomic_exchange builtin; the previous
// value is written into __ret and returned.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}
754
// Strong compare-exchange: the builtin's 4th argument (weak=false) forbids
// spurious failure. On failure, *__expected is updated with the observed
// value. The failure order is demoted through __to_gcc_failure_order since
// release/acq_rel are not valid failure orders.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
776
// Weak compare-exchange: identical to the strong form except the builtin's
// 4th argument (weak=true) permits spurious failure, which can be cheaper
// on LL/SC architectures.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
798
// Byte multiplier applied to fetch_add/fetch_sub deltas: 1 for ordinary
// (integral) types, sizeof(_Tp) for _Tp* so that pointer arithmetic steps
// by whole objects, matching the __atomic_fetch_* builtins' byte-based
// pointer increments.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (Empty specializations: any use of ::value fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
811
// fetch_add: returns the previous value. The delta is scaled by
// __skip_amt so pointer atomics advance by whole objects (the builtin
// treats pointer deltas as raw bytes).
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
827
// fetch_sub: returns the previous value; delta scaled as in fetch_add.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}
843
// fetch_and: returns the previous value (integral types only).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
859
// fetch_or: returns the previous value (integral types only).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}
875
// fetch_xor: returns the previous value (integral types only).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}
891
892#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
893
894#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)
895
// Storage wrapper for the C11 backend: holds the value as _Atomic(_Tp) so
// the __c11_atomic_* builtins can operate on it directly.
template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  // _Atomic() is a C11 extension in C++; the macro silences that warning.
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};
909
910#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)
911
// Fences via the C11-style builtins; memory_order is passed through as its
// underlying integer type.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

// Compiler-only fence for signal-handler ordering; no hardware fence.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
    __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
921
// Non-atomic initialization of the _Atomic storage (init need not be an
// atomic operation).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(&__a->__a_value, __val);
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
    __c11_atomic_init(&__a->__a_value, __val);
}
932
// Atomic store via the C11-style builtin.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
    __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
943
// Atomic load. __c11_atomic_load takes a pointer to non-const storage, so
// const is cast away from the (conceptually read-only) member; the load
// itself does not modify the object.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
    using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
    return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
956
// Atomic exchange: returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
    return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
967
968_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
969  // Avoid switch statement to make this a constexpr.
970  return __order == memory_order_release ? memory_order_relaxed:
971         (__order == memory_order_acq_rel ? memory_order_acquire:
972             __order);
973}
974
// Strong CAS: no spurious failure. On failure, *__expected receives the
// observed value. The failure order is demoted via __to_failure_order
// because release/acq_rel are not valid failure orders.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
985
// Weak CAS: may fail spuriously (cheaper on LL/SC targets); otherwise the
// same contract as the strong form above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
    return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value,  static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
996
// fetch_add: returns the previous value. The builtin handles pointer
// scaling itself, so the pointer overloads just forward a ptrdiff_t delta.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

// Pointer overloads: delta is an element count (ptrdiff_t).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1018
// fetch_sub: returns the previous value; pointer overloads take an element
// count, mirroring fetch_add above.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
1039
// fetch_and: returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1050
// fetch_or: returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1061
// fetch_xor: returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
    return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
1072
1073#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
1074
// [atomics.order] std::kill_dependency: terminates a memory_order_consume
// dependency chain by returning its argument by value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
1081
// Public ATOMIC_*_LOCK_FREE macros, forwarded from whichever set of
// compiler-predefined macros is available (Clang's, else GCC's).
#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif
1111
1112#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
1113
// Lock-based fallback storage for types that are not always lock-free when
// only the compiler builtins may be used: the value is guarded by a
// per-object spinlock (an atomic flag spun on with exchange/acquire and
// cleared with store/release). The lock is mutable so const loads can
// still take it. NOTE: the spin loop has no backoff or yield.
template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Acquire the spinlock: exchange returns the previous flag; spin while it
  // was already held (non-zero).
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  // Release the spinlock with a release store, publishing writes made
  // while it was held.
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  // Copy the value out under the lock.
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
1155
// Non-atomic initialization; deliberately does not take the lock (init is
// not required to be an atomic operation).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val) {
  __a->__a_value = __val;
}
1166
// Store under the spinlock; the requested memory order is subsumed by the
// lock's acquire/release pair and therefore ignored.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a,  _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}
1181
// Load under the spinlock (see __cxx_atomic_lock_impl::__read); the
// requested order is subsumed by the lock and ignored.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
1192
1193template <typename _Tp>
1194_LIBCPP_INLINE_VISIBILITY
1195_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1196  __a->__lock();
1197  _Tp __old;
1198  __cxx_atomic_assign_volatile(__old, __a->__a_value);
1199  __cxx_atomic_assign_volatile(__a->__a_value, __value);
1200  __a->__unlock();
1201  return __old;
1202}
1203template <typename _Tp>
1204_LIBCPP_INLINE_VISIBILITY
1205_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
1206  __a->__lock();
1207  _Tp __old = __a->__a_value;
1208  __a->__a_value = __value;
1209  __a->__unlock();
1210  return __old;
1211}
1212
// CAS under the spinlock. Comparison is bytewise (memcmp), matching the
// object-representation semantics of atomic compare_exchange; on failure
// *__expected receives the current value. Memory orders are subsumed by
// the lock and ignored. NOTE(review): for types with padding, bytewise
// comparison can differ from operator== — presumably intended, as the
// lock-free backends compare representations too; confirm if changing.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1241
// "Weak" CAS for the lock-based backend: under the lock there is no LL/SC
// to fail spuriously, so this is the same algorithm as the strong form
// (spurious failure is permitted, not required).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}
1270
// fetch_add under the spinlock: returns the previous value; memory order
// is subsumed by the lock and ignored.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1292
// Pointer fetch_add under the spinlock: delta is an element count.
// NOTE(review): _Td appears nowhere in the signature and so cannot be
// deduced; it looks like these overloads are never selected by overload
// resolution (the generic _Tp=_Tp* overloads above handle pointers) —
// confirm against upstream before relying on them.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                           ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}
1314
// fetch_sub under the spinlock: returns the previous value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
1336
// fetch_and under the spinlock: returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}
1358
// fetch_or under the spinlock: returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}
1380
// fetch_xor under the spinlock: returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
1402
// Compile-time "is this type always lock-free" trait, used below to pick
// between the builtin-backed and the lock-based storage.
#ifdef __cpp_lib_atomic_is_always_lock_free

// Preferred path: ask the compiler directly (null pointer = typical
// alignment for the size).
template<typename _Tp> struct __cxx_is_always_lock_free {
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };

#else

// Fallback: default to not-lock-free, then specialize from the
// ATOMIC_*_LOCK_FREE macros (a value of 2 means always lock-free).
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_CHAR8_T
template<> struct __cxx_is_always_lock_free<char8_t> { enum { __value = 2 == ATOMIC_CHAR8_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
#endif
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };

#endif //__cpp_lib_atomic_is_always_lock_free
1436
// Storage selector used by std::atomic: when restricted to builtins only,
// pick the builtin-backed impl for always-lock-free types and the
// spinlock-based impl otherwise; in normal builds, always use the
// builtin-backed impl (the builtins themselves may call libatomic).
template <typename _Tp,
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
    // [atomics.types.generic] precondition on std::atomic's element type.
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
    : _Base(value) {}
};
1453
// The integer type used for the wait/notify contention state.  It is 32-bit
// on Linux and 32-bit AIX — presumably to match the platform wait primitive's
// word size (e.g. futex words are 32 bits; confirm against src/atomic.cpp) —
// and 64-bit elsewhere.
#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__))
    using __cxx_contention_t = int32_t;
#else
    using __cxx_contention_t = int64_t;
#endif // __linux__ || (_AIX && !__64BIT__)

// Atomic wrapper over the contention word, used by the wait/notify tables.
using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;
1461
// Without threads there is no platform wait/notify support.
#if defined(_LIBCPP_HAS_NO_THREADS)
#   define _LIBCPP_HAS_NO_PLATFORM_WAIT
#endif

// TODO:
// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
// it is not tied anywhere into the build system or even documented. We should
// clean it up because it is technically never defined except when threads are
// disabled. We should clean it up in its own changeset in case we break "bad"
// users.
#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT

// Out-of-line wait/notify entry points; _LIBCPP_EXPORTED_FROM_ABI indicates
// they are implemented in the built libc++ library rather than this header.
// The untyped (void const volatile*) overloads address arbitrary atomics;
// the __cxx_atomic_contention_t overloads take the contention word directly.
// __libcpp_atomic_monitor returns a token that a later __libcpp_atomic_wait
// uses to detect intervening notifications.
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);
1483
// Backoff policy passed to __libcpp_thread_poll_with_backoff.  Escalates by
// elapsed polling time: spin (< 4us), yield the thread (4us..64us), then
// block in the platform wait (> 64us).  Returns true when __test_fn()
// observed the awaited condition, false to keep polling.
template <class _Atp, class _Fn>
struct __libcpp_atomic_wait_backoff_impl {
    _Atp* __a;        // the atomic being waited on
    _Fn __test_fn;    // predicate: true once the wait should end
    _LIBCPP_AVAILABILITY_SYNC
    _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
    {
        if(__elapsed > chrono::microseconds(64))
        {
            // The monitor token is fetched *before* re-testing, and handed to
            // __libcpp_atomic_wait — presumably so a notification arriving
            // between the test and the wait is not lost (the contract of
            // __libcpp_atomic_monitor/__libcpp_atomic_wait lives in the
            // built library; see src/atomic.cpp).
            auto const __monitor = __libcpp_atomic_monitor(__a);
            if(__test_fn())
                return true;
            __libcpp_atomic_wait(__a, __monitor);
        }
        else if(__elapsed > chrono::microseconds(4))
            __libcpp_thread_yield();
        else
            {} // poll
        return false;
    }
};
1505
// Poll __test_fn until it returns true, using the timed backoff above to
// escalate from spinning to yielding to a blocking platform wait.
template <class _Atp, class _Fn>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
{
    __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
    return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
}
1513
#else // _LIBCPP_HAS_NO_PLATFORM_WAIT

// Fallback when no platform wait is available: notifications are no-ops
// (waiters poll instead of blocking, so there is nobody to wake).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
// Fallback wait: pure polling.  With threads available it backs off with
// timed sleeps; without threads it can only spin.
template <class _Atp, class _Fn>
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
{
#if defined(_LIBCPP_HAS_NO_THREADS)
    using _Policy = __spinning_backoff_policy;
#else
    using _Policy = __libcpp_timed_backoff_policy;
#endif
    return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
}

#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT
1532
// Predicate used by the value-based wait: "the atomic no longer compares
// equal (bitwise, via __cxx_nonatomic_compare_equal) to the old value __val".
template <class _Atp, class _Tp>
struct __cxx_atomic_wait_test_fn_impl {
    _Atp* __a;            // atomic to reload
    _Tp __val;            // the "old" value to wait to change away from
    memory_order __order;  // ordering used for each reload
    _LIBCPP_INLINE_VISIBILITY bool operator()() const
    {
        return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
    }
};
1543
// Value-based wait used by atomic<T>::wait(): blocks/polls until the stored
// value no longer compares equal to __val, loading with __order each time.
template <class _Atp, class _Tp>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
{
    __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
    return __cxx_atomic_wait(__a, __test_fn);
}
1551
// general atomic<T>

// Common implementation shared by all atomic<T> specializations.  The bool
// template parameter selects the integral extension below (true for integral
// types other than bool); this primary template provides only the operations
// every atomic has: load/store/exchange/CAS and wait/notify.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable: const member functions (load, wait) still need the address of
    // non-const storage for the underlying atomic builtins.
    mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    // Runtime lock-freedom query; keyed on sizeof(_Tp), not the object
    // address.  The const overload delegates to the volatile one.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    // The _LIBCPP_CHECK_*_MEMORY_ORDER macros emit compile-time diagnostics
    // for memory orders the standard forbids for that operation.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    // Implicit conversion: a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    // Two-order CAS overloads (__s on success, __f on failure); on failure
    // __e is updated with the observed value.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order CAS overloads: the same order __m is used for both the
    // success and the failure case.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // C++20 wait/notify, implemented via the __cxx_atomic_wait/notify layer
    // above (platform wait where available, polling otherwise).
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, __v, __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

// P0883 (C++20): default construction value-initializes; before C++20 the
// value is left uninitialized (pre-C++20 behavior kept for compatibility).
#if _LIBCPP_STD_VER > 17
    _LIBCPP_INLINE_VISIBILITY constexpr
    __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

    __atomic_base(const __atomic_base&) = delete;
};
1658
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-line definition for the static constexpr data member declared in
// __atomic_base; needed so that odr-uses link before C++17 inline variables.
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
1663
// atomic<Integral>

// Extension of __atomic_base for integral types (other than bool): adds the
// fetch_* read-modify-write operations and the arithmetic/bitwise operator
// sugar defined in terms of them.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
    __atomic_base() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Each fetch_* returns the value held *before* the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the old value; pre-increment/decrement
    // and the compound assignments recompute and return the new value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1746
// atomic<T>

// Primary std::atomic template.  All operations come from __atomic_base
// (which dispatches to the integral extension for integral T); this layer
// only adds the standard typedefs, assignment from _Tp, and deleted copy.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    typedef _Tp value_type;
    typedef value_type difference_type;

#if _LIBCPP_STD_VER > 17
    _LIBCPP_INLINE_VISIBILITY
    atomic() = default;
#else
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the assigned value
    // (not a reference, per the standard's atomic assignment signature).
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1778
// atomic<T*>

// Partial specialization for object pointers: difference_type is ptrdiff_t
// and fetch_add/fetch_sub perform pointer arithmetic in units of _Tp.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    typedef _Tp* value_type;
    typedef ptrdiff_t difference_type;

    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT = default;

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the assigned pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // __atomic_fetch_add accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // __atomic_fetch_add accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
        // __atomic_fetch_sub accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
        // __atomic_fetch_sub accepts function pointers, guard against them.
        static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
        return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
    }

    // Post-inc/dec return the old pointer; pre-inc/dec and the compound
    // assignments return the new pointer (recomputed from the old value).
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}

    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;
};
1858
// atomic_is_lock_free

// Non-member equivalent of __o->is_lock_free(); volatile and non-volatile
// overloads mirror the member API.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}
1876
// atomic_init

// Non-atomic initialization of *__o (deprecated in C++20 by P0883).  Unlike
// atomic_store this writes the value without any synchronization, so it must
// only be used before the object is visible to other threads.
template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}
1894
// atomic_store

// Non-member equivalent of __o->store(__d) with the default seq_cst order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

// Same, with a caller-supplied order; _LIBCPP_CHECK_STORE_MEMORY_ORDER
// diagnoses orders that are invalid for a store.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}
1932
// atomic_load

// Non-member equivalent of __o->load() with the default seq_cst order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

// Same, with a caller-supplied order; _LIBCPP_CHECK_LOAD_MEMORY_ORDER
// diagnoses orders that are invalid for a load.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
1970
// atomic_exchange

// Non-member equivalent of __o->exchange(__d); returns the previous value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

// Same, with a caller-supplied memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}
2006
// atomic_compare_exchange_weak

// Non-member CAS with seq_cst ordering.  Note the expected value is passed
// by pointer (C compatibility); on failure *__e receives the observed value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

// As above, but may not fail spuriously.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}
2042
// atomic_compare_exchange_weak_explicit

// Two-order CAS: __s applies on success, __f on failure; the check macro
// diagnoses failure orders the standard forbids.  On failure *__e receives
// the observed value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                      typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

// As above, but may not fail spuriously.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                        typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
2089
// atomic_wait

// Non-member equivalent of __o->wait(__v): blocks until the stored value no
// longer compares equal to __v (seq_cst loads).
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const volatile atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

// atomic_wait_explicit

// Same, loading with a caller-supplied order; the check macro diagnoses
// orders that are invalid for a load.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}
2129
// atomic_notify_one

// Non-member equivalent of __o->notify_one(): wakes at least one thread
// blocked in an atomic wait on *__o, if any.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
2144
// atomic_notify_all
// (section header previously said atomic_notify_one — copy/paste fix)

// Non-member equivalent of __o->notify_all(): wakes all threads blocked in
// an atomic wait on *__o.
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
2159
// atomic_fetch_add

// Non-member fetch_add; the operand type is atomic<_Tp>::difference_type
// (ptrdiff_t for the pointer specialization).  Returns the previous value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

// Same, with a caller-supplied memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
2193
// atomic_fetch_sub

// Non-member fetch_sub; operand is atomic<_Tp>::difference_type.  Returns
// the previous value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

// Same, with a caller-supplied memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
2225
// atomic_fetch_and

// Non-member fetch_and.  SFINAE-restricted to integral, non-bool _Tp (the
// bitwise operations exist only on the integral specialization).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

// Same, with a caller-supplied memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
2277
2278// atomic_fetch_or
2279
2280template <class _Tp>
2281_LIBCPP_INLINE_VISIBILITY
2282typename enable_if
2283<
2284    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2285    _Tp
2286>::type
2287atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2288{
2289    return __o->fetch_or(__op);
2290}
2291
2292template <class _Tp>
2293_LIBCPP_INLINE_VISIBILITY
2294typename enable_if
2295<
2296    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2297    _Tp
2298>::type
2299atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2300{
2301    return __o->fetch_or(__op);
2302}
2303
2304// atomic_fetch_or_explicit
2305
2306template <class _Tp>
2307_LIBCPP_INLINE_VISIBILITY
2308typename enable_if
2309<
2310    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2311    _Tp
2312>::type
2313atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2314{
2315    return __o->fetch_or(__op, __m);
2316}
2317
2318template <class _Tp>
2319_LIBCPP_INLINE_VISIBILITY
2320typename enable_if
2321<
2322    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2323    _Tp
2324>::type
2325atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2326{
2327    return __o->fetch_or(__op, __m);
2328}
2329
2330// atomic_fetch_xor
2331
2332template <class _Tp>
2333_LIBCPP_INLINE_VISIBILITY
2334typename enable_if
2335<
2336    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2337    _Tp
2338>::type
2339atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2340{
2341    return __o->fetch_xor(__op);
2342}
2343
2344template <class _Tp>
2345_LIBCPP_INLINE_VISIBILITY
2346typename enable_if
2347<
2348    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2349    _Tp
2350>::type
2351atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
2352{
2353    return __o->fetch_xor(__op);
2354}
2355
2356// atomic_fetch_xor_explicit
2357
2358template <class _Tp>
2359_LIBCPP_INLINE_VISIBILITY
2360typename enable_if
2361<
2362    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2363    _Tp
2364>::type
2365atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2366{
2367    return __o->fetch_xor(__op, __m);
2368}
2369
2370template <class _Tp>
2371_LIBCPP_INLINE_VISIBILITY
2372typename enable_if
2373<
2374    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
2375    _Tp
2376>::type
2377atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
2378{
2379    return __o->fetch_xor(__op, __m);
2380}
2381
// flag type and operations

// std::atomic_flag: a boolean atomic flag, implemented as a thin wrapper over
// the __cxx_atomic_impl backend holding _LIBCPP_ATOMIC_FLAG_TYPE. Written as
// `typedef struct atomic_flag { ... } atomic_flag;` to match the C-compatible
// declaration style used for this type.
typedef struct atomic_flag
{
    // Sole data member: the backend atomic representation of the flag.
    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

    // test (C++20): returns true iff the flag is currently set; read via an
    // atomic load with the requested ordering.
    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}

    // test_and_set: atomically sets the flag (via exchange with true) and
    // returns the value it held before.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    // clear: atomically stores false with the requested ordering.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

    // C++20 wait/notify, forwarded to the __cxx_atomic_* helpers; marked
    // _LIBCPP_AVAILABILITY_SYNC because the out-of-line support may not be
    // present on older deployment targets.
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
    // Since C++20 (P0883) default construction initializes the flag to clear.
    _LIBCPP_INLINE_VISIBILITY constexpr
    atomic_flag() _NOEXCEPT : __a_(false) {}
#else
    // Pre-C++20: default-constructed state is unspecified; users initialize
    // with ATOMIC_FLAG_INIT.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag() _NOEXCEPT = default;
#endif

    // libc++ extension: construct with an explicit initial value.
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // atomic_flag is neither copyable nor copy-assignable.
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

} atomic_flag;
2443
2444
2445inline _LIBCPP_INLINE_VISIBILITY
2446bool
2447atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
2448{
2449    return __o->test();
2450}
2451
2452inline _LIBCPP_INLINE_VISIBILITY
2453bool
2454atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
2455{
2456    return __o->test();
2457}
2458
2459inline _LIBCPP_INLINE_VISIBILITY
2460bool
2461atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2462{
2463    return __o->test(__m);
2464}
2465
2466inline _LIBCPP_INLINE_VISIBILITY
2467bool
2468atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
2469{
2470    return __o->test(__m);
2471}
2472
2473inline _LIBCPP_INLINE_VISIBILITY
2474bool
2475atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
2476{
2477    return __o->test_and_set();
2478}
2479
2480inline _LIBCPP_INLINE_VISIBILITY
2481bool
2482atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
2483{
2484    return __o->test_and_set();
2485}
2486
2487inline _LIBCPP_INLINE_VISIBILITY
2488bool
2489atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2490{
2491    return __o->test_and_set(__m);
2492}
2493
2494inline _LIBCPP_INLINE_VISIBILITY
2495bool
2496atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2497{
2498    return __o->test_and_set(__m);
2499}
2500
2501inline _LIBCPP_INLINE_VISIBILITY
2502void
2503atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
2504{
2505    __o->clear();
2506}
2507
2508inline _LIBCPP_INLINE_VISIBILITY
2509void
2510atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
2511{
2512    __o->clear();
2513}
2514
2515inline _LIBCPP_INLINE_VISIBILITY
2516void
2517atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
2518{
2519    __o->clear(__m);
2520}
2521
2522inline _LIBCPP_INLINE_VISIBILITY
2523void
2524atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
2525{
2526    __o->clear(__m);
2527}
2528
2529inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2530void
2531atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
2532{
2533    __o->wait(__v);
2534}
2535
2536inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2537void
2538atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
2539{
2540    __o->wait(__v);
2541}
2542
2543inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2544void
2545atomic_flag_wait_explicit(const volatile atomic_flag* __o,
2546                          bool __v, memory_order __m) _NOEXCEPT
2547{
2548    __o->wait(__v, __m);
2549}
2550
2551inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2552void
2553atomic_flag_wait_explicit(const atomic_flag* __o,
2554                          bool __v, memory_order __m) _NOEXCEPT
2555{
2556    __o->wait(__v, __m);
2557}
2558
2559inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2560void
2561atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
2562{
2563    __o->notify_one();
2564}
2565
2566inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2567void
2568atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
2569{
2570    __o->notify_one();
2571}
2572
2573inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2574void
2575atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
2576{
2577    __o->notify_all();
2578}
2579
2580inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
2581void
2582atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
2583{
2584    __o->notify_all();
2585}
2586
2587// fences
2588
2589inline _LIBCPP_INLINE_VISIBILITY
2590void
2591atomic_thread_fence(memory_order __m) _NOEXCEPT
2592{
2593    __cxx_atomic_thread_fence(__m);
2594}
2595
2596inline _LIBCPP_INLINE_VISIBILITY
2597void
2598atomic_signal_fence(memory_order __m) _NOEXCEPT
2599{
2600    __cxx_atomic_signal_fence(__m);
2601}
2602
// Atomics for standard typedef types

// Required aliases from [atomics.syn]: one atomic_X typedef per fundamental
// integer/character type. Spelled as typedefs (not C++11 alias declarations)
// so they remain valid in C++03 mode.
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t>            atomic_char8_t;
#endif
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t>            atomic_wchar_t;
#endif

// Aliases for the <cstdint> least-width integer types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases for the <cstdint> fastest-width integer types.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases for the exact-width integer types.
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Aliases for pointer-sized / size / max-width integer types.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
2659
// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

// _LIBCPP_CONTENTION_LOCK_FREE evaluates to true iff the platform's
// contention token type (__cxx_contention_t) is statically known to be
// always lock-free; without the is_always_lock_free feature it is
// conservatively false.
#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE __atomic_always_lock_free(sizeof(__cxx_contention_t), 0)
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

// Select the underlying types for atomic_signed_lock_free /
// atomic_unsigned_lock_free: the widest integer type whose ATOMIC_*_LOCK_FREE
// macro is 2 (always lock-free), except that __cxx_contention_t is preferred
// whenever it is itself lock-free (per the comment above). If no candidate is
// lock-free, the aliases are omitted entirely (_LIBCPP_NO_LOCK_FREE_TYPES).
#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type                __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type       __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type              __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type     __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type               __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type      __libcpp_unsigned_lock_free;
#else
    // No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

// Public aliases (C++20), only when a lock-free candidate exists.
#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free> atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif
2689
// C++11 initialization macros. Both are deprecated in C++20, where
// atomic_flag and atomic value-initialize in their default constructors.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

// In C++20 mode, mark the macros deprecated where the compiler supports
// `#pragma clang deprecated` (Clang 14+), unless the user has suppressed
// deprecation warnings.
#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
# if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#  pragma clang deprecated(ATOMIC_FLAG_INIT)
#  pragma clang deprecated(ATOMIC_VAR_INIT)
# endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
2699
2700_LIBCPP_END_NAMESPACE_STD
2701
2702#endif // _LIBCPP_ATOMIC
2703