xref: /llvm-project-15.0.7/libcxx/include/atomic (revision 052d95a6)
1// -*- C++ -*-
2//===--------------------------- atomic -----------------------------------===//
3//
4//                     The LLVM Compiler Infrastructure
5//
6// This file is distributed under the University of Illinois Open Source
7// License. See LICENSE.TXT for details.
8//
9//===----------------------------------------------------------------------===//
10
11#ifndef _LIBCPP_ATOMIC
12#define _LIBCPP_ATOMIC
13
14/*
15    atomic synopsis
16
17namespace std
18{
19
20// feature test macro
21
22#define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
23
24// order and consistency
25
26typedef enum memory_order
27{
28    memory_order_relaxed,
29    memory_order_consume,  // load-consume
30    memory_order_acquire,  // load-acquire
31    memory_order_release,  // store-release
32    memory_order_acq_rel,  // store-release load-acquire
33    memory_order_seq_cst   // store-release load-acquire
34} memory_order;
35
36template <class T> T kill_dependency(T y) noexcept;
37
38// lock-free property
39
40#define ATOMIC_BOOL_LOCK_FREE unspecified
41#define ATOMIC_CHAR_LOCK_FREE unspecified
42#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
43#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
44#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
45#define ATOMIC_SHORT_LOCK_FREE unspecified
46#define ATOMIC_INT_LOCK_FREE unspecified
47#define ATOMIC_LONG_LOCK_FREE unspecified
48#define ATOMIC_LLONG_LOCK_FREE unspecified
49#define ATOMIC_POINTER_LOCK_FREE unspecified
50
51// flag type and operations
52
53typedef struct atomic_flag
54{
55    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
56    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
57    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
58    void clear(memory_order m = memory_order_seq_cst) noexcept;
59    atomic_flag()  noexcept = default;
60    atomic_flag(const atomic_flag&) = delete;
61    atomic_flag& operator=(const atomic_flag&) = delete;
62    atomic_flag& operator=(const atomic_flag&) volatile = delete;
63} atomic_flag;
64
65bool
66    atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
67
68bool
69    atomic_flag_test_and_set(atomic_flag* obj) noexcept;
70
71bool
72    atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
73                                      memory_order m) noexcept;
74
75bool
76    atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
77
78void
79    atomic_flag_clear(volatile atomic_flag* obj) noexcept;
80
81void
82    atomic_flag_clear(atomic_flag* obj) noexcept;
83
84void
85    atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
86
87void
88    atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
89
90#define ATOMIC_FLAG_INIT see below
91#define ATOMIC_VAR_INIT(value) see below
92
93template <class T>
94struct atomic
95{
96    static constexpr bool is_always_lock_free;
97    bool is_lock_free() const volatile noexcept;
98    bool is_lock_free() const noexcept;
99    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
100    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
101    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
102    T load(memory_order m = memory_order_seq_cst) const noexcept;
103    operator T() const volatile noexcept;
104    operator T() const noexcept;
105    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
106    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
107    bool compare_exchange_weak(T& expc, T desr,
108                               memory_order s, memory_order f) volatile noexcept;
109    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
110    bool compare_exchange_strong(T& expc, T desr,
111                                 memory_order s, memory_order f) volatile noexcept;
112    bool compare_exchange_strong(T& expc, T desr,
113                                 memory_order s, memory_order f) noexcept;
114    bool compare_exchange_weak(T& expc, T desr,
115                               memory_order m = memory_order_seq_cst) volatile noexcept;
116    bool compare_exchange_weak(T& expc, T desr,
117                               memory_order m = memory_order_seq_cst) noexcept;
118    bool compare_exchange_strong(T& expc, T desr,
119                                memory_order m = memory_order_seq_cst) volatile noexcept;
120    bool compare_exchange_strong(T& expc, T desr,
121                                 memory_order m = memory_order_seq_cst) noexcept;
122
123    atomic() noexcept = default;
124    constexpr atomic(T desr) noexcept;
125    atomic(const atomic&) = delete;
126    atomic& operator=(const atomic&) = delete;
127    atomic& operator=(const atomic&) volatile = delete;
128    T operator=(T) volatile noexcept;
129    T operator=(T) noexcept;
130};
131
132template <>
133struct atomic<integral>
134{
135    static constexpr bool is_always_lock_free;
136    bool is_lock_free() const volatile noexcept;
137    bool is_lock_free() const noexcept;
138    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
139    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
140    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
141    integral load(memory_order m = memory_order_seq_cst) const noexcept;
142    operator integral() const volatile noexcept;
143    operator integral() const noexcept;
144    integral exchange(integral desr,
145                      memory_order m = memory_order_seq_cst) volatile noexcept;
146    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
147    bool compare_exchange_weak(integral& expc, integral desr,
148                               memory_order s, memory_order f) volatile noexcept;
149    bool compare_exchange_weak(integral& expc, integral desr,
150                               memory_order s, memory_order f) noexcept;
151    bool compare_exchange_strong(integral& expc, integral desr,
152                                 memory_order s, memory_order f) volatile noexcept;
153    bool compare_exchange_strong(integral& expc, integral desr,
154                                 memory_order s, memory_order f) noexcept;
155    bool compare_exchange_weak(integral& expc, integral desr,
156                               memory_order m = memory_order_seq_cst) volatile noexcept;
157    bool compare_exchange_weak(integral& expc, integral desr,
158                               memory_order m = memory_order_seq_cst) noexcept;
159    bool compare_exchange_strong(integral& expc, integral desr,
160                                memory_order m = memory_order_seq_cst) volatile noexcept;
161    bool compare_exchange_strong(integral& expc, integral desr,
162                                 memory_order m = memory_order_seq_cst) noexcept;
163
164    integral
165        fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
166    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
167    integral
168        fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
169    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
170    integral
171        fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
172    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
173    integral
174        fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
175    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
176    integral
177        fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
178    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
179
180    atomic() noexcept = default;
181    constexpr atomic(integral desr) noexcept;
182    atomic(const atomic&) = delete;
183    atomic& operator=(const atomic&) = delete;
184    atomic& operator=(const atomic&) volatile = delete;
185    integral operator=(integral desr) volatile noexcept;
186    integral operator=(integral desr) noexcept;
187
188    integral operator++(int) volatile noexcept;
189    integral operator++(int) noexcept;
190    integral operator--(int) volatile noexcept;
191    integral operator--(int) noexcept;
192    integral operator++() volatile noexcept;
193    integral operator++() noexcept;
194    integral operator--() volatile noexcept;
195    integral operator--() noexcept;
196    integral operator+=(integral op) volatile noexcept;
197    integral operator+=(integral op) noexcept;
198    integral operator-=(integral op) volatile noexcept;
199    integral operator-=(integral op) noexcept;
200    integral operator&=(integral op) volatile noexcept;
201    integral operator&=(integral op) noexcept;
202    integral operator|=(integral op) volatile noexcept;
203    integral operator|=(integral op) noexcept;
204    integral operator^=(integral op) volatile noexcept;
205    integral operator^=(integral op) noexcept;
206};
207
208template <class T>
209struct atomic<T*>
210{
211    static constexpr bool is_always_lock_free;
212    bool is_lock_free() const volatile noexcept;
213    bool is_lock_free() const noexcept;
214    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
215    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
216    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
217    T* load(memory_order m = memory_order_seq_cst) const noexcept;
218    operator T*() const volatile noexcept;
219    operator T*() const noexcept;
220    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
221    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
222    bool compare_exchange_weak(T*& expc, T* desr,
223                               memory_order s, memory_order f) volatile noexcept;
224    bool compare_exchange_weak(T*& expc, T* desr,
225                               memory_order s, memory_order f) noexcept;
226    bool compare_exchange_strong(T*& expc, T* desr,
227                                 memory_order s, memory_order f) volatile noexcept;
228    bool compare_exchange_strong(T*& expc, T* desr,
229                                 memory_order s, memory_order f) noexcept;
230    bool compare_exchange_weak(T*& expc, T* desr,
231                               memory_order m = memory_order_seq_cst) volatile noexcept;
232    bool compare_exchange_weak(T*& expc, T* desr,
233                               memory_order m = memory_order_seq_cst) noexcept;
234    bool compare_exchange_strong(T*& expc, T* desr,
235                                memory_order m = memory_order_seq_cst) volatile noexcept;
236    bool compare_exchange_strong(T*& expc, T* desr,
237                                 memory_order m = memory_order_seq_cst) noexcept;
238    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
239    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
240    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
241    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
242
243    atomic() noexcept = default;
244    constexpr atomic(T* desr) noexcept;
245    atomic(const atomic&) = delete;
246    atomic& operator=(const atomic&) = delete;
247    atomic& operator=(const atomic&) volatile = delete;
248
249    T* operator=(T*) volatile noexcept;
250    T* operator=(T*) noexcept;
251    T* operator++(int) volatile noexcept;
252    T* operator++(int) noexcept;
253    T* operator--(int) volatile noexcept;
254    T* operator--(int) noexcept;
255    T* operator++() volatile noexcept;
256    T* operator++() noexcept;
257    T* operator--() volatile noexcept;
258    T* operator--() noexcept;
259    T* operator+=(ptrdiff_t op) volatile noexcept;
260    T* operator+=(ptrdiff_t op) noexcept;
261    T* operator-=(ptrdiff_t op) volatile noexcept;
262    T* operator-=(ptrdiff_t op) noexcept;
263};
264
265
266template <class T>
267    bool
268    atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
269
270template <class T>
271    bool
272    atomic_is_lock_free(const atomic<T>* obj) noexcept;
273
274template <class T>
275    void
276    atomic_init(volatile atomic<T>* obj, T desr) noexcept;
277
278template <class T>
279    void
280    atomic_init(atomic<T>* obj, T desr) noexcept;
281
282template <class T>
283    void
284    atomic_store(volatile atomic<T>* obj, T desr) noexcept;
285
286template <class T>
287    void
288    atomic_store(atomic<T>* obj, T desr) noexcept;
289
290template <class T>
291    void
292    atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
293
294template <class T>
295    void
296    atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
297
298template <class T>
299    T
300    atomic_load(const volatile atomic<T>* obj) noexcept;
301
302template <class T>
303    T
304    atomic_load(const atomic<T>* obj) noexcept;
305
306template <class T>
307    T
308    atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
309
310template <class T>
311    T
312    atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
313
314template <class T>
315    T
316    atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
317
318template <class T>
319    T
320    atomic_exchange(atomic<T>* obj, T desr) noexcept;
321
322template <class T>
323    T
324    atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
325
326template <class T>
327    T
328    atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
329
330template <class T>
331    bool
332    atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
333
334template <class T>
335    bool
336    atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
337
338template <class T>
339    bool
340    atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
341
342template <class T>
343    bool
344    atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
345
346template <class T>
347    bool
348    atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
349                                          T desr,
350                                          memory_order s, memory_order f) noexcept;
351
352template <class T>
353    bool
354    atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
355                                          memory_order s, memory_order f) noexcept;
356
357template <class T>
358    bool
359    atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
360                                            T* expc, T desr,
361                                            memory_order s, memory_order f) noexcept;
362
363template <class T>
364    bool
365    atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
366                                            T desr,
367                                            memory_order s, memory_order f) noexcept;
368
369template <class Integral>
370    Integral
371    atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
372
373template <class Integral>
374    Integral
375    atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
376
377template <class Integral>
378    Integral
379    atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
380                              memory_order m) noexcept;
381template <class Integral>
382    Integral
383    atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
384                              memory_order m) noexcept;
385template <class Integral>
386    Integral
387    atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
388
389template <class Integral>
390    Integral
391    atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
392
393template <class Integral>
394    Integral
395    atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
396                              memory_order m) noexcept;
397template <class Integral>
398    Integral
399    atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
400                              memory_order m) noexcept;
401template <class Integral>
402    Integral
403    atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
404
405template <class Integral>
406    Integral
407    atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
408
409template <class Integral>
410    Integral
411    atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
412                              memory_order m) noexcept;
413template <class Integral>
414    Integral
415    atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
416                              memory_order m) noexcept;
417template <class Integral>
418    Integral
419    atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
420
421template <class Integral>
422    Integral
423    atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
424
425template <class Integral>
426    Integral
427    atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
428                             memory_order m) noexcept;
429template <class Integral>
430    Integral
431    atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
432                             memory_order m) noexcept;
433template <class Integral>
434    Integral
435    atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
436
437template <class Integral>
438    Integral
439    atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
440
441template <class Integral>
442    Integral
443    atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
444                              memory_order m) noexcept;
445template <class Integral>
446    Integral
447    atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
448                              memory_order m) noexcept;
449
450template <class T>
451    T*
452    atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
453
454template <class T>
455    T*
456    atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
457
458template <class T>
459    T*
460    atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
461                              memory_order m) noexcept;
462template <class T>
463    T*
464    atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
465
466template <class T>
467    T*
468    atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
469
470template <class T>
471    T*
472    atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
473
474template <class T>
475    T*
476    atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
477                              memory_order m) noexcept;
478template <class T>
479    T*
480    atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
481
482// Atomics for standard typedef types
483
484typedef atomic<bool>               atomic_bool;
485typedef atomic<char>               atomic_char;
486typedef atomic<signed char>        atomic_schar;
487typedef atomic<unsigned char>      atomic_uchar;
488typedef atomic<short>              atomic_short;
489typedef atomic<unsigned short>     atomic_ushort;
490typedef atomic<int>                atomic_int;
491typedef atomic<unsigned int>       atomic_uint;
492typedef atomic<long>               atomic_long;
493typedef atomic<unsigned long>      atomic_ulong;
494typedef atomic<long long>          atomic_llong;
495typedef atomic<unsigned long long> atomic_ullong;
496typedef atomic<char16_t>           atomic_char16_t;
497typedef atomic<char32_t>           atomic_char32_t;
498typedef atomic<wchar_t>            atomic_wchar_t;
499
500typedef atomic<int_least8_t>   atomic_int_least8_t;
501typedef atomic<uint_least8_t>  atomic_uint_least8_t;
502typedef atomic<int_least16_t>  atomic_int_least16_t;
503typedef atomic<uint_least16_t> atomic_uint_least16_t;
504typedef atomic<int_least32_t>  atomic_int_least32_t;
505typedef atomic<uint_least32_t> atomic_uint_least32_t;
506typedef atomic<int_least64_t>  atomic_int_least64_t;
507typedef atomic<uint_least64_t> atomic_uint_least64_t;
508
509typedef atomic<int_fast8_t>   atomic_int_fast8_t;
510typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
511typedef atomic<int_fast16_t>  atomic_int_fast16_t;
512typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
513typedef atomic<int_fast32_t>  atomic_int_fast32_t;
514typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
515typedef atomic<int_fast64_t>  atomic_int_fast64_t;
516typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
517
518typedef atomic<int8_t>   atomic_int8_t;
519typedef atomic<uint8_t>  atomic_uint8_t;
520typedef atomic<int16_t>  atomic_int16_t;
521typedef atomic<uint16_t> atomic_uint16_t;
522typedef atomic<int32_t>  atomic_int32_t;
523typedef atomic<uint32_t> atomic_uint32_t;
524typedef atomic<int64_t>  atomic_int64_t;
525typedef atomic<uint64_t> atomic_uint64_t;
526
527typedef atomic<intptr_t>  atomic_intptr_t;
528typedef atomic<uintptr_t> atomic_uintptr_t;
529typedef atomic<size_t>    atomic_size_t;
530typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
531typedef atomic<intmax_t>  atomic_intmax_t;
532typedef atomic<uintmax_t> atomic_uintmax_t;
533
534// fences
535
536void atomic_thread_fence(memory_order m) noexcept;
537void atomic_signal_fence(memory_order m) noexcept;
538
539}  // std
540
541*/
542
543#include <__config>
544#include <cstddef>
545#include <cstdint>
546#include <type_traits>
547
548#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
549#pragma GCC system_header
550#endif
551
552#ifdef _LIBCPP_HAS_NO_THREADS
553#error <atomic> is not supported on this single threaded system
554#endif
555#if !defined(_LIBCPP_HAS_C_ATOMIC_IMP) && !defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
556#error <atomic> is not implemented
557#endif
558
559#if _LIBCPP_STD_VER > 14
560# define __cpp_lib_atomic_is_always_lock_free 201603L
561#endif
562
563_LIBCPP_BEGIN_NAMESPACE_STD
564
// C++11 [atomics.order]: the six memory orderings.  Declared with the
// old-style "typedef enum" form so the enumerators live at namespace
// scope, matching C and pre-C++11 usage of these names.
typedef enum memory_order
{
    memory_order_relaxed, memory_order_consume, memory_order_acquire,
    memory_order_release, memory_order_acq_rel, memory_order_seq_cst
} memory_order;
570
571#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
namespace __gcc_atomic {

// Storage wrapper that stands in for the C11 _Atomic(T) type when libc++
// is layered on the GCC __atomic_* builtins (see the _Atomic macro just
// below): it simply holds a _Tp by value in __a_value.
template <typename _Tp>
struct __gcc_atomic_t {

#if _GNUC_VER >= 501
    static_assert(is_trivially_copyable<_Tp>::value,
      "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __gcc_atomic_t() _NOEXCEPT = default;
#else
    // C++03 has no "= default": value-initialize the member instead.
    __gcc_atomic_t() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __gcc_atomic_t(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
#define _Atomic(x) __gcc_atomic::__gcc_atomic_t<x>

// Declared but never defined; only named in unevaluated (sizeof)
// contexts to manufacture a value of type _Tp for the probe below.
template <typename _Tp> _Tp __create();

// SFINAE probe: the first overload is viable only when the assignment
// "_Tp()->__a_value = _Td-value" is well-formed; otherwise overload
// resolution falls through to the variadic overload returning __two
// (which has a different size than char).
template <typename _Tp, typename _Td>
typename enable_if<sizeof(_Tp()->__a_value = __create<_Td>()), char>::type
    __test_atomic_assignable(int);
template <typename _Tp, typename _Up>
__two __test_atomic_assignable(...);

// True iff a _Td can be assigned into the __a_value member through a
// pointer of type _Tp; selects between the direct-assignment and
// byte-copy flavors of __c11_atomic_init.
template <typename _Tp, typename _Td>
struct __can_assign {
  static const bool value =
      sizeof(__test_atomic_assignable<_Tp, _Td>(1)) == sizeof(char);
};

// Map a std::memory_order to the corresponding __ATOMIC_* constant
// understood by the GCC builtins.
static inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

// Same mapping, but for the *failure* ordering of a compare-exchange:
// release and acq_rel are not permitted as failure orders, so they are
// demoted to relaxed and acquire respectively.
static inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}

} // namespace __gcc_atomic
628
// __c11_atomic_init: non-atomic initialization of the contained value.
// This overload is chosen when _Tp is assignable through a volatile
// lvalue (see __gcc_atomic::__can_assign).
template <typename _Tp>
static inline
typename enable_if<
    __gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}

// Fallback overload: _Tp's assignment operator is unusable through a
// volatile lvalue, but works through a non-volatile one, so the value
// is copied byte by byte instead.
template <typename _Tp>
static inline
typename enable_if<
    !__gcc_atomic::__can_assign<volatile _Atomic(_Tp)*, _Tp>::value &&
     __gcc_atomic::__can_assign<         _Atomic(_Tp)*, _Tp>::value>::type
__c11_atomic_init(volatile _Atomic(_Tp)* __a,  _Tp __val) {
  // [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
  // the default operator= in an object is not volatile, a byte-by-byte copy
  // is required.  (memcpy cannot be used here: it would discard the
  // volatile qualification of the destination.)
  volatile char* to = reinterpret_cast<volatile char*>(&__a->__a_value);
  volatile char* end = to + sizeof(_Tp);
  char* from = reinterpret_cast<char*>(&__val);
  while (to != end) {
    *to++ = *from++;
  }
}

// Non-volatile overload: a plain assignment suffices.
template <typename _Tp>
static inline void __c11_atomic_init(_Atomic(_Tp)* __a,  _Tp __val) {
  __a->__a_value = __val;
}
658
659static inline void __c11_atomic_thread_fence(memory_order __order) {
660  __atomic_thread_fence(__gcc_atomic::__to_gcc_order(__order));
661}
662
663static inline void __c11_atomic_signal_fence(memory_order __order) {
664  __atomic_signal_fence(__gcc_atomic::__to_gcc_order(__order));
665}
666
667template <typename _Tp>
668static inline void __c11_atomic_store(volatile _Atomic(_Tp)* __a,  _Tp __val,
669                                      memory_order __order) {
670  return __atomic_store(&__a->__a_value, &__val,
671                        __gcc_atomic::__to_gcc_order(__order));
672}
673
674template <typename _Tp>
675static inline void __c11_atomic_store(_Atomic(_Tp)* __a,  _Tp __val,
676                                      memory_order __order) {
677  __atomic_store(&__a->__a_value, &__val,
678                 __gcc_atomic::__to_gcc_order(__order));
679}
680
681template <typename _Tp>
682static inline _Tp __c11_atomic_load(volatile _Atomic(_Tp)* __a,
683                                    memory_order __order) {
684  _Tp __ret;
685  __atomic_load(&__a->__a_value, &__ret,
686                __gcc_atomic::__to_gcc_order(__order));
687  return __ret;
688}
689
690template <typename _Tp>
691static inline _Tp __c11_atomic_load(_Atomic(_Tp)* __a, memory_order __order) {
692  _Tp __ret;
693  __atomic_load(&__a->__a_value, &__ret,
694                __gcc_atomic::__to_gcc_order(__order));
695  return __ret;
696}
697
698template <typename _Tp>
699static inline _Tp __c11_atomic_exchange(volatile _Atomic(_Tp)* __a,
700                                        _Tp __value, memory_order __order) {
701  _Tp __ret;
702  __atomic_exchange(&__a->__a_value, &__value, &__ret,
703                    __gcc_atomic::__to_gcc_order(__order));
704  return __ret;
705}
706
707template <typename _Tp>
708static inline _Tp __c11_atomic_exchange(_Atomic(_Tp)* __a, _Tp __value,
709                                        memory_order __order) {
710  _Tp __ret;
711  __atomic_exchange(&__a->__a_value, &__value, &__ret,
712                    __gcc_atomic::__to_gcc_order(__order));
713  return __ret;
714}
715
716template <typename _Tp>
717static inline bool __c11_atomic_compare_exchange_strong(
718    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
719    memory_order __success, memory_order __failure) {
720  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
721                                   false,
722                                   __gcc_atomic::__to_gcc_order(__success),
723                                   __gcc_atomic::__to_gcc_failure_order(__failure));
724}
725
726template <typename _Tp>
727static inline bool __c11_atomic_compare_exchange_strong(
728    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
729    memory_order __failure) {
730  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
731                                   false,
732                                   __gcc_atomic::__to_gcc_order(__success),
733                                   __gcc_atomic::__to_gcc_failure_order(__failure));
734}
735
736template <typename _Tp>
737static inline bool __c11_atomic_compare_exchange_weak(
738    volatile _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value,
739    memory_order __success, memory_order __failure) {
740  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
741                                   true,
742                                   __gcc_atomic::__to_gcc_order(__success),
743                                   __gcc_atomic::__to_gcc_failure_order(__failure));
744}
745
746template <typename _Tp>
747static inline bool __c11_atomic_compare_exchange_weak(
748    _Atomic(_Tp)* __a, _Tp* __expected, _Tp __value, memory_order __success,
749    memory_order __failure) {
750  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
751                                   true,
752                                   __gcc_atomic::__to_gcc_order(__success),
753                                   __gcc_atomic::__to_gcc_failure_order(__failure));
754}
755
// Scale factor applied to fetch_add/fetch_sub deltas: 1 for arithmetic
// types.  An enum (rather than a static data member) is used so no
// out-of-line definition is required if the value is odr-used.
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

// Pointer specialization: deltas are in units of the pointee size,
// giving the usual T* arithmetic semantics.
template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
// (These specializations have no "value" member, so any use of them
// fails to compile.)
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
768
// __c11_atomic_fetch_add / __c11_atomic_fetch_sub: map the C11-style
// atomic read-modify-write operations onto the GCC __atomic_* builtins.
// The delta is pre-scaled by __skip_amt<_Tp>::value so pointer atomics
// advance by whole objects; for array element types __skip_amt has no
// "value" member, making these a compile-time error.
template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_add(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(volatile _Atomic(_Tp)* __a,
                                         _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
static inline _Tp __c11_atomic_fetch_sub(_Atomic(_Tp)* __a, _Td __delta,
                                         memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __gcc_atomic::__to_gcc_order(__order));
}
796
// __c11_atomic_fetch_and/or/xor: bitwise atomic read-modify-write,
// forwarded to the GCC __atomic_* builtins.  Each comes in a volatile
// and a non-volatile overload; all return the value held before the
// operation.  No __skip_amt scaling here — bitwise ops are only used
// with integral types.
template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_and(_Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(volatile _Atomic(_Tp)* __a,
                                        _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_or(_Atomic(_Tp)* __a, _Tp __pattern,
                                        memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(volatile _Atomic(_Tp)* __a,
                                         _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}

template <typename _Tp>
static inline _Tp __c11_atomic_fetch_xor(_Atomic(_Tp)* __a, _Tp __pattern,
                                         memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __gcc_atomic::__to_gcc_order(__order));
}
838#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP
839
// std::kill_dependency ([atomics.order]): ends a dependency chain begun
// by a memory_order_consume load.  This implementation simply returns
// its argument by value; the library does not track dependency chains
// explicitly, so the value round-trip is all that is required.
template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
kill_dependency(_Tp __y) _NOEXCEPT
{
    return __y;
}
847
// Lock-free property macros ([atomics.lockfree]): forwarded directly to
// the compiler's __GCC_ATOMIC_*_LOCK_FREE predefined macros
// (0 = never lock-free, 1 = sometimes, 2 = always).
#define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
#define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
#define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
#define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
#define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
#define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
#define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
#define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
#define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
858
859// general atomic<T>
860
// Base of atomic<T>: the operations every atomic supports
// (load/store/exchange/compare-exchange).  The bool parameter is true
// for integral types other than bool, selecting the partial
// specialization below that adds arithmetic/bitwise operations.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable: const-qualified operations (load, is_lock_free) still
    // need to pass a non-const pointer to the atomic primitives.
    mutable _Atomic(_Tp) __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
    {
#if defined(_LIBCPP_HAS_C_ATOMIC_IMP)
    return __c11_atomic_is_lock_free(sizeof(_Tp));
#else
    return __atomic_is_lock_free(sizeof(_Tp), 0);
#endif
    }
    // Non-volatile overload forwards to the volatile one above.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return __c11_atomic_load(&__a_, __m);}
    // Implicit conversion to _Tp performs a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT          {return load();}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, __d, __m);}
    // Two-order compare-exchange: __s is the order on success, __f the
    // order for the load performed when the comparison fails.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    // Single-order overloads pass __m as both success and failure order;
    // the underlying primitive clamps __m to a valid failure (load)
    // order (on the GCC path, __gcc_atomic::__to_gcc_failure_order).
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                              memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    // Default construction leaves the stored value uninitialized
    // (pre-C++20 atomic semantics); C++03 has no "= default", so it
    // falls back to value-initializing the representation.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base() _NOEXCEPT = default;
#else
    __atomic_base() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}
    // Atomics are neither copyable nor copy-assignable.  In C++03 this
    // is emulated with private, never-defined members.
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif
};
957
#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-line definition for the static data member declared in
// __atomic_base (required for odr-use before C++17 inline variables).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
962
963// atomic<Integral>
964
// Partial specialization for integral types other than bool: layers the
// arithmetic and bitwise fetch-operations (and the operators derived
// from them) on top of the common base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;
    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // All fetch_* operations return the value held before the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Operators are built from one atomic RMW each: post-inc/dec return
    // the old value; pre- and compound forms recompute the new value
    // locally from the value the RMW returned.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT      {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT               {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT      {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT               {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT         {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT                  {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT         {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT                  {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT          {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT          {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT          {return fetch_xor(__op) ^ __op;}
};
1043
1044// atomic<T>
1045
// Primary std::atomic<T> ([atomics.types.generic]): inherits all
// operations from __atomic_base and adds assignment from _Tp, which
// performs a seq_cst store and returns the stored value (not a
// reference, per the standard's signature).
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
1063
1064// atomic<T*>
1065
// Specialization for pointers ([atomics.types.pointer]): adds
// fetch_add/fetch_sub taking ptrdiff_t plus the pointer-arithmetic
// operators.  Scaling by the element size is performed by the
// underlying __c11_atomic_fetch_add/_sub primitive, so the deltas here
// are element counts, as with built-in pointer arithmetic.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;
    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment performs a seq_cst store and returns the stored pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_* return the pointer value held before the operation.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-inc/dec return the old pointer; pre- and compound forms
    // recompute the new pointer locally from the RMW's return value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT            {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT                     {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT            {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT                     {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT               {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT                        {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT               {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT                        {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT          {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT          {return fetch_sub(__op) - __op;}
};
1123
// Non-member atomic API ([atomics.nonmembers]).  Every function below is
// a thin forwarder to the corresponding atomic<T> member (or, for
// atomic_init, to the underlying primitive).  Each comes in a volatile
// and a non-volatile overload; *_explicit variants take an explicit
// memory_order instead of the seq_cst default.

// atomic_is_lock_free

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init
// NOTE: this is initialization, not an atomic store; per the standard,
// using it on an object that has already been initialized is undefined.

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __c11_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
{
    return __o->load(__m);
}
1231
// Non-member exchange and compare-exchange ([atomics.nonmembers]).  The
// C-compatible API passes the expected value by pointer; on failure the
// member function updates *__e with the observed value.  Non-_explicit
// forms use the member defaults (seq_cst).

// atomic_exchange

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
1346
// Non-member fetch_add/fetch_sub ([atomics.nonmembers]).  The integral
// overloads are constrained via enable_if to integral types other than
// bool; the pointer overloads take a ptrdiff_t element count.  All
// return the value held before the operation.

// atomic_fetch_add

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
1516
// Non-member bitwise fetch operations ([atomics.nonmembers]).  Provided
// only for integral types other than bool (no pointer overloads); each
// returns the value held before the operation.

// atomic_fetch_and

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
inline _LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
1672
1673// flag type and operations
1674
// std::atomic_flag ([atomics.flag]): implemented here as an atomic
// bool.  test_and_set is an atomic exchange with true, returning the
// previous state; clear is an atomic store of false.
typedef struct atomic_flag
{
    _Atomic(bool) __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __c11_atomic_exchange(&__a_, true, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__c11_atomic_store(&__a_, false, __m);}

    // C++03 has no "= default"; fall back to value-initialization.
    _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag() _NOEXCEPT = default;
#else
    atomic_flag() _NOEXCEPT : __a_() {}
#endif // _LIBCPP_CXX03_LANG

    // libc++ extension: construction with an initial state.
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // Not copyable or copy-assignable (C++03: private, never defined).
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
1713
// Non-member atomic_flag API ([atomics.flag]): thin forwarders to the
// members, in volatile and non-volatile overloads; *_explicit variants
// take an explicit memory_order instead of the seq_cst default.
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}
1768}
1769
1770// fences
1771
1772inline _LIBCPP_INLINE_VISIBILITY
1773void
1774atomic_thread_fence(memory_order __m) _NOEXCEPT
1775{
1776    __c11_atomic_thread_fence(__m);
1777}
1778
1779inline _LIBCPP_INLINE_VISIBILITY
1780void
1781atomic_signal_fence(memory_order __m) _NOEXCEPT
1782{
1783    __c11_atomic_signal_fence(__m);
1784}
1785
1786// Atomics for standard typedef types
1787
// Aliases for atomics over the builtin boolean, character, and integer
// types (C++11 [atomics.syn]).
typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

// Aliases for the <cstdint> least-width integer types.
typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// Aliases for the <cstdint> fast-width integer types.
typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// Aliases for the exact-width <cstdint> types.
typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Aliases for the pointer-sized, size-related, and max-width types.
typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;
1837
// Initializes an atomic_flag to the clear (false) state:
//   std::atomic_flag f = ATOMIC_FLAG_INIT;
#define ATOMIC_FLAG_INIT {false}
// Constant-initializes a std::atomic<T> with value __v using
// aggregate-style braces:  std::atomic<int> a = ATOMIC_VAR_INIT(5);
#define ATOMIC_VAR_INIT(__v) {__v}
1840
1841_LIBCPP_END_NAMESPACE_STD
1842
1843#endif  // _LIBCPP_ATOMIC
1844