// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP_ATOMIC
#define _LIBCPP_ATOMIC

/*
    atomic synopsis

namespace std
{

// feature test macro [version.syn]

#define __cpp_lib_atomic_is_always_lock_free
#define __cpp_lib_atomic_flag_test
#define __cpp_lib_atomic_lock_free_type_aliases
#define __cpp_lib_atomic_wait

 // order and consistency

 enum memory_order: unspecified // enum class in C++20
 {
    relaxed,
    consume, // load-consume
    acquire, // load-acquire
    release, // store-release
    acq_rel, // store-release load-acquire
    seq_cst // store-release load-acquire
 };

 inline constexpr auto memory_order_relaxed = memory_order::relaxed;
 inline constexpr auto memory_order_consume = memory_order::consume;
 inline constexpr auto memory_order_acquire = memory_order::acquire;
 inline constexpr auto memory_order_release = memory_order::release;
 inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
 inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

template <class T> T kill_dependency(T y) noexcept;

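// Illustrative example (editors' sketch, not part of the standard synopsis):
// a releasing store paired with an acquiring load is the usual way to publish
// data from one thread to another. Assumes <atomic> and <cassert>, with
// producer() and consumer() running on separate threads.
//
//   std::atomic<bool> ready{false};
//   int payload = 0;
//
//   void producer() {
//     payload = 42;                                  // plain write
//     ready.store(true, std::memory_order_release);  // publish it
//   }
//
//   void consumer() {
//     while (!ready.load(std::memory_order_acquire)) // wait for the publish
//       ;
//     assert(payload == 42);                         // write is visible here
//   }
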
// lock-free property

#define ATOMIC_BOOL_LOCK_FREE unspecified
#define ATOMIC_CHAR_LOCK_FREE unspecified
#define ATOMIC_CHAR8_T_LOCK_FREE unspecified // C++20
#define ATOMIC_CHAR16_T_LOCK_FREE unspecified
#define ATOMIC_CHAR32_T_LOCK_FREE unspecified
#define ATOMIC_WCHAR_T_LOCK_FREE unspecified
#define ATOMIC_SHORT_LOCK_FREE unspecified
#define ATOMIC_INT_LOCK_FREE unspecified
#define ATOMIC_LONG_LOCK_FREE unspecified
#define ATOMIC_LLONG_LOCK_FREE unspecified
#define ATOMIC_POINTER_LOCK_FREE unspecified

template <class T>
struct atomic
{
    using value_type = T;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept(is_nothrow_default_constructible_v<T>); // since C++20
    constexpr atomic(T desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T() const volatile noexcept;
    operator T() const noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
    T operator=(T) volatile noexcept;
    T operator=(T) noexcept;

    T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T& expc, T desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T& expc, T desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    void wait(T, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

template <>
struct atomic<integral>
{
    using value_type = integral;
    using difference_type = value_type;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default;
    constexpr atomic(integral desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    integral load(memory_order m = memory_order_seq_cst) const noexcept;
    operator integral() const volatile noexcept;
    operator integral() const noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    integral operator=(integral desr) volatile noexcept;
    integral operator=(integral desr) noexcept;

    integral exchange(integral desr,
                      memory_order m = memory_order_seq_cst) volatile noexcept;
    integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(integral& expc, integral desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(integral& expc, integral desr,
                                 memory_order m = memory_order_seq_cst) noexcept;

    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
    integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;

    integral operator++(int) volatile noexcept;
    integral operator++(int) noexcept;
    integral operator--(int) volatile noexcept;
    integral operator--(int) noexcept;
    integral operator++() volatile noexcept;
    integral operator++() noexcept;
    integral operator--() volatile noexcept;
    integral operator--() noexcept;
    integral operator+=(integral op) volatile noexcept;
    integral operator+=(integral op) noexcept;
    integral operator-=(integral op) volatile noexcept;
    integral operator-=(integral op) noexcept;
    integral operator&=(integral op) volatile noexcept;
    integral operator&=(integral op) noexcept;
    integral operator|=(integral op) volatile noexcept;
    integral operator|=(integral op) noexcept;
    integral operator^=(integral op) volatile noexcept;
    integral operator^=(integral op) noexcept;

    void wait(integral, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(integral, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

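// Illustrative example (editors' sketch, not part of the standard synopsis):
// the integral specializations add the fetch_* arithmetic, and
// compare_exchange_weak is the usual building block for read-modify-write
// loops. Assumes <atomic>.
//
//   std::atomic<unsigned> hits{0};
//
//   void count_hit() {
//     hits.fetch_add(1, std::memory_order_relaxed);   // plain counter
//   }
//
//   // Atomically double the counter, retrying if another thread intervenes.
//   void double_hits() {
//     unsigned expected = hits.load(std::memory_order_relaxed);
//     while (!hits.compare_exchange_weak(expected, expected * 2,
//                                        std::memory_order_acq_rel,
//                                        std::memory_order_relaxed))
//       ;  // on failure, expected is reloaded with the current value
//   }
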
template <class T>
struct atomic<T*>
{
    using value_type = T*;
    using difference_type = ptrdiff_t;

    static constexpr bool is_always_lock_free;
    bool is_lock_free() const volatile noexcept;
    bool is_lock_free() const noexcept;

    atomic() noexcept = default; // until C++20
    constexpr atomic() noexcept; // since C++20
    constexpr atomic(T* desr) noexcept;
    atomic(const atomic&) = delete;
    atomic& operator=(const atomic&) = delete;
    atomic& operator=(const atomic&) volatile = delete;

    T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
    T* load(memory_order m = memory_order_seq_cst) const noexcept;
    operator T*() const volatile noexcept;
    operator T*() const noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    T* operator=(T*) volatile noexcept;
    T* operator=(T*) noexcept;

    T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order s, memory_order f) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order s, memory_order f) noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_weak(T*& expc, T* desr,
                               memory_order m = memory_order_seq_cst) noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) volatile noexcept;
    bool compare_exchange_strong(T*& expc, T* desr,
                                 memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
    T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;

    T* operator++(int) volatile noexcept;
    T* operator++(int) noexcept;
    T* operator--(int) volatile noexcept;
    T* operator--(int) noexcept;
    T* operator++() volatile noexcept;
    T* operator++() noexcept;
    T* operator--() volatile noexcept;
    T* operator--() noexcept;
    T* operator+=(ptrdiff_t op) volatile noexcept;
    T* operator+=(ptrdiff_t op) noexcept;
    T* operator-=(ptrdiff_t op) volatile noexcept;
    T* operator-=(ptrdiff_t op) noexcept;

    void wait(T*, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(T*, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
};

// [atomics.nonmembers], non-member functions
template<class T>
  bool atomic_is_lock_free(const volatile atomic<T>*) noexcept;
template<class T>
  bool atomic_is_lock_free(const atomic<T>*) noexcept;
template<class T>
  void atomic_store(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  void atomic_store_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  void atomic_store_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_load(const volatile atomic<T>*) noexcept;
template<class T>
  T atomic_load(const atomic<T>*) noexcept;
template<class T>
  T atomic_load_explicit(const volatile atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_load_explicit(const atomic<T>*, memory_order) noexcept;
template<class T>
  T atomic_exchange(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_exchange_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_exchange_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(volatile atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak(atomic<T>*, atomic<T>::value_type*,
                                    atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(volatile atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_strong(atomic<T>*, atomic<T>::value_type*,
                                      atomic<T>::value_type) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_weak_explicit(atomic<T>*, atomic<T>::value_type*,
                                             atomic<T>::value_type,
                                             memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(volatile atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;
template<class T>
  bool atomic_compare_exchange_strong_explicit(atomic<T>*, atomic<T>::value_type*,
                                               atomic<T>::value_type,
                                               memory_order, memory_order) noexcept;

template<class T>
  T atomic_fetch_add(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_add_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_add_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub(volatile atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub(atomic<T>*, atomic<T>::difference_type) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(volatile atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_sub_explicit(atomic<T>*, atomic<T>::difference_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_and_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_and_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_or(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_or_explicit(volatile atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_or_explicit(atomic<T>*, atomic<T>::value_type,
                             memory_order) noexcept;
template<class T>
  T atomic_fetch_xor(volatile atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor(atomic<T>*, atomic<T>::value_type) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(volatile atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;
template<class T>
  T atomic_fetch_xor_explicit(atomic<T>*, atomic<T>::value_type,
                              memory_order) noexcept;

template<class T>
  void atomic_wait(const volatile atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait(const atomic<T>*, atomic<T>::value_type);
template<class T>
  void atomic_wait_explicit(const volatile atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_wait_explicit(const atomic<T>*, atomic<T>::value_type,
                            memory_order);
template<class T>
  void atomic_notify_one(volatile atomic<T>*);
template<class T>
  void atomic_notify_one(atomic<T>*);
template<class T>
  void atomic_notify_all(volatile atomic<T>*);
template<class T>
  void atomic_notify_all(atomic<T>*);

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char8_t>            atomic_char8_t; // C++20
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
typedef atomic<wchar_t>            atomic_wchar_t;

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic<int8_t>   atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic<int16_t>  atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic<int32_t>  atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic<int64_t>  atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// flag type and operations

typedef struct atomic_flag
{
    atomic_flag() noexcept = default; // until C++20
    constexpr atomic_flag() noexcept; // since C++20
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

    bool test(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test(memory_order m = memory_order_seq_cst) noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
    bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
    void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
    void clear(memory_order m = memory_order_seq_cst) noexcept;

    void wait(bool, memory_order = memory_order::seq_cst) const volatile noexcept;
    void wait(bool, memory_order = memory_order::seq_cst) const noexcept;
    void notify_one() volatile noexcept;
    void notify_one() noexcept;
    void notify_all() volatile noexcept;
    void notify_all() noexcept;
} atomic_flag;

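// Illustrative example (editors' sketch, not part of the standard synopsis):
// atomic_flag is always lock-free and is commonly used to build a tiny
// spinlock; since C++20 it can wait/notify instead of spinning hot.
// Assumes <atomic> and C++20.
//
//   class spinlock {
//     std::atomic_flag flag_;  // clear on construction since C++20
//   public:
//     void lock() {
//       while (flag_.test_and_set(std::memory_order_acquire))
//         flag_.wait(true, std::memory_order_relaxed);  // block while still set
//     }
//     void unlock() {
//       flag_.clear(std::memory_order_release);
//       flag_.notify_one();                             // wake one waiter, if any
//     }
//   };
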
bool atomic_flag_test(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test(atomic_flag* obj) noexcept;
bool atomic_flag_test_explicit(volatile atomic_flag* obj,
                               memory_order m) noexcept;
bool atomic_flag_test_explicit(atomic_flag* obj, memory_order m) noexcept;
bool atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set(atomic_flag* obj) noexcept;
bool atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
                                       memory_order m) noexcept;
bool atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear(volatile atomic_flag* obj) noexcept;
void atomic_flag_clear(atomic_flag* obj) noexcept;
void atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
void atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;

void atomic_flag_wait(const volatile atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait(const atomic_flag* obj, bool old) noexcept;
void atomic_flag_wait_explicit(const volatile atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_wait_explicit(const atomic_flag* obj, bool old, memory_order m) noexcept;
void atomic_flag_notify_one(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_one(atomic_flag* obj) noexcept;
void atomic_flag_notify_all(volatile atomic_flag* obj) noexcept;
void atomic_flag_notify_all(atomic_flag* obj) noexcept;

// fences

void atomic_thread_fence(memory_order m) noexcept;
void atomic_signal_fence(memory_order m) noexcept;

// deprecated

template <class T>
  void atomic_init(volatile atomic<T>* obj, atomic<T>::value_type desr) noexcept;

template <class T>
  void atomic_init(atomic<T>* obj, atomic<T>::value_type desr) noexcept;

#define ATOMIC_VAR_INIT(value) see below

#define ATOMIC_FLAG_INIT see below

} // std

*/

#include <__assert> // all public C++ headers provide the assertion handler
#include <__availability>
#include <__chrono/duration.h>
#include <__config>
#include <__thread/poll_with_backoff.h>
#include <__thread/timed_backoff_policy.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <type_traits>
#include <version>

#ifndef _LIBCPP_HAS_NO_THREADS
#  include <__threading_support>
#endif

#ifndef _LIBCPP_REMOVE_TRANSITIVE_INCLUDES
#  include <chrono>
#endif

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

#ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
#  error <atomic> is not implemented
#endif
#ifdef kill_dependency
#  error <atomic> is incompatible with <stdatomic.h> before C++23. Please compile with -std=c++23.
#endif

#define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)                           \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume ||               \
                           __m == memory_order_acquire ||               \
                           __m == memory_order_acq_rel,                 \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)                            \
  _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release ||               \
                           __m == memory_order_acq_rel,                 \
                           "memory order argument to atomic operation is invalid")

#define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f)                   \
  _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release ||               \
                           __f == memory_order_acq_rel,                 \
                           "memory order argument to atomic operation is invalid")
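
// Illustrative note (editors' sketch, not part of libc++): these diagnostics
// flag memory-order arguments the Standard does not permit for the operation,
// for example:
//
//   std::atomic<int> x{0};
//   x.store(1, std::memory_order_acquire);      // diagnosed: stores may only use
//                                               // relaxed, release, or seq_cst
//   int v = x.load(std::memory_order_release);  // diagnosed: loads may only use
//                                               // relaxed, consume, acquire, or seq_cst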

_LIBCPP_BEGIN_NAMESPACE_STD

// Figure out what the underlying type for `memory_order` would be if it were
// declared as an unscoped enum (accounting for -fshort-enums). Use this result
// to pin the underlying type in C++20.
enum __legacy_memory_order {
  __mo_relaxed,
  __mo_consume,
  __mo_acquire,
  __mo_release,
  __mo_acq_rel,
  __mo_seq_cst
};

typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;

#if _LIBCPP_STD_VER > 17

enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

template <typename _Tp> _LIBCPP_INLINE_VISIBILITY
bool __cxx_nonatomic_compare_equal(_Tp const& __lhs, _Tp const& __rhs) {
  return _VSTD::memcmp(&__lhs, &__rhs, sizeof(_Tp)) == 0;
}

static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
              "unexpected underlying type for std::memory_order");

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
    *__to++ = *__from++;
}

#endif

#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELEASE:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
           __ATOMIC_CONSUME))));
}

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
         (__order == memory_order_release ? __ATOMIC_RELAXED:
         (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
         (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
           __ATOMIC_CONSUME))));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };

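// Illustrative note (editors' sketch, not part of libc++): the __atomic_fetch_add
// and __atomic_fetch_sub builtins do not scale pointer operands the way C++
// pointer arithmetic does, which is why the wrappers below multiply the delta
// by __skip_amt. For example, for a stored type int* the call fetch_add(2)
// must advance the pointer by 2 * sizeof(int) bytes, and that is exactly what
// __delta * __skip_amt<int*>::value produces.
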
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)

#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)

template <typename _Tp>
struct __cxx_atomic_base_impl {

  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
    __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
    __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp __value) _NOEXCEPT
    : __a_value(__value) {}
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};

#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR memory_order __to_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_release ? memory_order_relaxed:
         (__order == memory_order_acq_rel ? memory_order_acquire:
           __order);
}
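
// Illustrative note (editors' sketch, not part of libc++): the failure order
// passed to a compare-exchange may not contain a release component, so the
// mapping above yields, for example,
//   __to_failure_order(memory_order_release) == memory_order_relaxed
//   __to_failure_order(memory_order_acq_rel) == memory_order_acquire
// and leaves relaxed, consume, acquire and seq_cst unchanged.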

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__to_failure_order(__failure)));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}

#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
  return __y;
}

#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __CLANG_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
#ifndef _LIBCPP_HAS_NO_CHAR8_T
# define ATOMIC_CHAR8_T_LOCK_FREE   __GCC_ATOMIC_CHAR8_T_LOCK_FREE
#endif
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif

template <class _Tp>
struct __libcpp_is_always_lock_free {
  // __atomic_always_lock_free is available in all Standard modes
  static const bool __value = __atomic_always_lock_free(sizeof(_Tp), 0);
};

#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS

template<typename _Tp>
struct __cxx_atomic_lock_impl {

  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
        /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  _Tp __temp;
  __a->__lock();
  __cxx_atomic_assign_volatile(__temp, __a->__a_value);
  bool __ret = (_VSTD::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    __cxx_atomic_assign_volatile(__a->__a_value, __value);
  else
    __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  __a->__unlock();
  return __ret;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = (_VSTD::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
  if(__ret)
    _VSTD::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
  else
    _VSTD::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
  __a->__unlock();
  return __ret;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp,
          typename _Base = typename conditional<__libcpp_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<T> requires that 'T' be a trivially copyable type");

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT = default;
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp __value) _NOEXCEPT
    : _Base(__value) {}
};

#if defined(__linux__) || (defined(_AIX) && !defined(__64BIT__))
  using __cxx_contention_t = int32_t;
#else
  using __cxx_contention_t = int64_t;
#endif // __linux__ || (_AIX && !__64BIT__)

using __cxx_atomic_contention_t = __cxx_atomic_impl<__cxx_contention_t>;

#if defined(_LIBCPP_HAS_NO_THREADS)
# define _LIBCPP_HAS_NO_PLATFORM_WAIT
#endif

// TODO:
// _LIBCPP_HAS_NO_PLATFORM_WAIT is currently a "dead" macro, in the sense that
// it is not tied anywhere into the build system or even documented. We should
// clean it up because it is technically never defined except when threads are
// disabled. We should clean it up in its own changeset in case we break "bad"
// users.
#ifndef _LIBCPP_HAS_NO_PLATFORM_WAIT

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(void const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(void const volatile*, __cxx_contention_t);

_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_one(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __cxx_atomic_notify_all(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI __cxx_contention_t __libcpp_atomic_monitor(__cxx_atomic_contention_t const volatile*);
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_EXPORTED_FROM_ABI void __libcpp_atomic_wait(__cxx_atomic_contention_t const volatile*, __cxx_contention_t);

template <class _Atp, class _Fn>
struct __libcpp_atomic_wait_backoff_impl {
  _Atp* __a;
  _Fn __test_fn;
  _LIBCPP_AVAILABILITY_SYNC
  _LIBCPP_INLINE_VISIBILITY bool operator()(chrono::nanoseconds __elapsed) const
  {
    if(__elapsed > chrono::microseconds(64))
    {
      auto const __monitor = __libcpp_atomic_monitor(__a);
      if(__test_fn())
        return true;
      __libcpp_atomic_wait(__a, __monitor);
    }
    else if(__elapsed > chrono::microseconds(4))
      __libcpp_thread_yield();
    else
      {} // poll
    return false;
  }
};

template <class _Atp, class _Fn>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Fn && __test_fn)
{
  __libcpp_atomic_wait_backoff_impl<_Atp, typename decay<_Fn>::type> __backoff_fn = {__a, __test_fn};
  return __libcpp_thread_poll_with_backoff(__test_fn, __backoff_fn);
}

#else // _LIBCPP_HAS_NO_PLATFORM_WAIT

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_all(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY void __cxx_atomic_notify_one(__cxx_atomic_impl<_Tp> const volatile*) { }
template <class _Atp, class _Fn>
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp*, _Fn && __test_fn)
{
#if defined(_LIBCPP_HAS_NO_THREADS)
  using _Policy = __spinning_backoff_policy;
#else
  using _Policy = __libcpp_timed_backoff_policy;
#endif
  return __libcpp_thread_poll_with_backoff(__test_fn, _Policy());
}

#endif // _LIBCPP_HAS_NO_PLATFORM_WAIT

template <class _Atp, class _Tp>
struct __cxx_atomic_wait_test_fn_impl {
  _Atp* __a;
  _Tp __val;
  memory_order __order;
  _LIBCPP_INLINE_VISIBILITY bool operator()() const
  {
    return !__cxx_nonatomic_compare_equal(__cxx_atomic_load(__a, __order), __val);
  }
};

template <class _Atp, class _Tp>
_LIBCPP_AVAILABILITY_SYNC
_LIBCPP_INLINE_VISIBILITY bool __cxx_atomic_wait(_Atp* __a, _Tp const __val, memory_order __order)
{
  __cxx_atomic_wait_test_fn_impl<_Atp, _Tp> __test_fn = {__a, __val, __order};
  return __cxx_atomic_wait(__a, __test_fn);
}

// general atomic<T>

template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base // false
{
  mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
  static _LIBCPP_CONSTEXPR bool is_always_lock_free = __libcpp_is_always_lock_free<__cxx_atomic_impl<_Tp> >::__value;
#endif

  _LIBCPP_INLINE_VISIBILITY
  bool is_lock_free() const volatile _NOEXCEPT
    {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
  _LIBCPP_INLINE_VISIBILITY
  bool is_lock_free() const _NOEXCEPT
    {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}
  _LIBCPP_INLINE_VISIBILITY
  void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
    {__cxx_atomic_store(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
    {__cxx_atomic_store(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
    {return __cxx_atomic_load(&__a_, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
    {return __cxx_atomic_load(&__a_, __m);}
  _LIBCPP_INLINE_VISIBILITY
  operator _Tp() const volatile _NOEXCEPT {return load();}
  _LIBCPP_INLINE_VISIBILITY
  operator _Tp() const _NOEXCEPT {return load();}
  _LIBCPP_INLINE_VISIBILITY
  _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_exchange(&__a_, __d, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __s, memory_order __f) volatile _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __s, memory_order __f) _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
    _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_weak(_Tp& __e, _Tp __d,
                             memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
  _LIBCPP_INLINE_VISIBILITY
  bool compare_exchange_strong(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
    {__cxx_atomic_wait(&__a_, __v, __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void wait(_Tp __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
    {__cxx_atomic_wait(&__a_, __v, __m);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() volatile _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_one() _NOEXCEPT
    {__cxx_atomic_notify_one(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() volatile _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}
  _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY void notify_all() _NOEXCEPT
    {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
  _LIBCPP_INLINE_VISIBILITY constexpr
  __atomic_base() noexcept(is_nothrow_default_constructible_v<_Tp>) : __a_(_Tp()) {}
#else
  _LIBCPP_INLINE_VISIBILITY
  __atomic_base() _NOEXCEPT = default;
#endif

  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
  __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

  __atomic_base(const __atomic_base&) = delete;
};

#if defined(__cpp_lib_atomic_is_always_lock_free)
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif

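// Editorial sketch (not part of the upstream header): the compare_exchange_weak
// members above are typically driven in a retry loop, since the weak form may
// fail spuriously on some targets. A minimal caller-side pattern, assuming a
// std::atomic<int> named __counter and a hypothetical pure function __next():
//
//   int __expected = __counter.load(std::memory_order_relaxed);
//   while (!__counter.compare_exchange_weak(__expected, __next(__expected),
//                                           std::memory_order_acq_rel,
//                                           std::memory_order_relaxed)) {
//     // on failure, __expected has been reloaded with the current value; retry
//   }
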
// atomic<Integral>

template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
  typedef __atomic_base<_Tp, false> __base;

  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR_AFTER_CXX17
  __atomic_base() _NOEXCEPT = default;

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
    {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
    {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};

// atomic<T>

template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
  typedef __atomic_base<_Tp> __base;
  typedef _Tp value_type;
  typedef value_type difference_type;

#if _LIBCPP_STD_VER > 17
  _LIBCPP_INLINE_VISIBILITY
  atomic() = default;
#else
  _LIBCPP_INLINE_VISIBILITY
  atomic() _NOEXCEPT = default;
#endif

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp operator=(_Tp __d) volatile _NOEXCEPT
    {__base::store(__d); return __d;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp operator=(_Tp __d) _NOEXCEPT
    {__base::store(__d); return __d;}

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

// atomic<T*>

template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
  typedef __atomic_base<_Tp*> __base;
  typedef _Tp* value_type;
  typedef ptrdiff_t difference_type;

  _LIBCPP_INLINE_VISIBILITY
  atomic() _NOEXCEPT = default;

  _LIBCPP_INLINE_VISIBILITY
  _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
    {__base::store(__d); return __d;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator=(_Tp* __d) _NOEXCEPT
    {__base::store(__d); return __d;}

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_add(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT {
    // __atomic_fetch_add accepts function pointers, guard against them.
    static_assert(!is_function<typename remove_pointer<_Tp>::type>::value, "Pointer to function isn't allowed");
    return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);
  }

  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
  _LIBCPP_INLINE_VISIBILITY
  _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}

  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
};

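// Editorial note (illustrative, not part of the upstream header): as with raw
// pointer arithmetic, the atomic<T*> fetch_add/fetch_sub above advance by whole
// objects, not bytes. A small sketch, assuming an array `int __buf[4]` whose
// first element's address was stored in a std::atomic<int*> named __p:
//
//   int* __prev = __p.fetch_add(1);   // __prev == __buf + 0
//   // __p now holds __buf + 1, i.e. the address moved by sizeof(int) bytes.
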
// atomic_is_lock_free

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_DEPRECATED_IN_CXX20 _LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

// atomic_store

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

// atomic_load

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

// atomic_exchange

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                      typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        typename atomic<_Tp>::value_type* __e, typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type* __e,
                                        typename atomic<_Tp>::value_type __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

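// Editorial sketch (not part of the upstream header): the atomic_wait and
// atomic_notify_* free functions below simply forward to the members of the
// same name. A minimal two-thread usage sketch, assuming a std::atomic<bool>
// named __ready shared by a producer and a consumer:
//
//   // consumer thread: block until __ready is no longer false
//   std::atomic_wait(&__ready, false);
//   // producer thread: publish the state change, then wake the waiter
//   __ready.store(true, std::memory_order_release);
//   std::atomic_notify_one(&__ready);
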
// atomic_wait

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const volatile atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait(const atomic<_Tp>* __o,
                 typename atomic<_Tp>::value_type __v) _NOEXCEPT
{
    return __o->wait(__v);
}

// atomic_wait_explicit

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const volatile atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_wait_explicit(const atomic<_Tp>* __o,
                          typename atomic<_Tp>::value_type __v,
                          memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->wait(__v, __m);
}

// atomic_notify_one

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_one(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_one();
}

// atomic_notify_all

template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(volatile atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}
template <class _Tp>
_LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
void atomic_notify_all(atomic<_Tp>* __o) _NOEXCEPT
{
    __o->notify_all();
}

// atomic_fetch_add

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_fetch_add(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_add_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

// atomic_fetch_sub

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp atomic_fetch_sub_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::difference_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

// atomic_fetch_and

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

// atomic_fetch_or

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

// atomic_fetch_xor

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, typename atomic<_Tp>::value_type __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

// flag type and operations

typedef struct atomic_flag
{
    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {return _LIBCPP_ATOMIC_FLAG_TYPE(true) == __cxx_atomic_load(&__a_, __m);}

    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void wait(bool __v, memory_order __m = memory_order_seq_cst) const _NOEXCEPT
        {__cxx_atomic_wait(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(__v), __m);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_one() volatile _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_one() _NOEXCEPT
        {__cxx_atomic_notify_one(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_all() volatile _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}
    _LIBCPP_AVAILABILITY_SYNC _LIBCPP_INLINE_VISIBILITY
    void notify_all() _NOEXCEPT
        {__cxx_atomic_notify_all(&__a_);}

#if _LIBCPP_STD_VER > 17
    _LIBCPP_INLINE_VISIBILITY constexpr
    atomic_flag() _NOEXCEPT : __a_(false) {}
#else
    _LIBCPP_INLINE_VISIBILITY
    atomic_flag() _NOEXCEPT = default;
#endif

    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;

} atomic_flag;

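// Editorial sketch (not part of the upstream header): atomic_flag is the minimal
// guaranteed-lock-free primitive, and its usual application is a spinlock. A
// hypothetical user-level sketch using only the members defined above:
//
//   struct __example_spinlock {
//     std::atomic_flag __locked;   // default construction leaves it clear (C++20)
//     void lock() {
//       while (__locked.test_and_set(std::memory_order_acquire))
//         __locked.wait(true, std::memory_order_relaxed);  // block instead of spinning
//     }
//     void unlock() {
//       __locked.clear(std::memory_order_release);
//       __locked.notify_one();
//     }
//   };
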
inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test(const atomic_flag* __o) _NOEXCEPT
{
    return __o->test();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_explicit(const atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
{
    return __o->test_and_set();
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
bool
atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    return __o->test_and_set(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
{
    __o->clear();
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
{
    __o->clear(__m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const volatile atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait(const atomic_flag* __o, bool __v) _NOEXCEPT
{
    __o->wait(__v);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const volatile atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_wait_explicit(const atomic_flag* __o,
                          bool __v, memory_order __m) _NOEXCEPT
{
    __o->wait(__v, __m);
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_one(atomic_flag* __o) _NOEXCEPT
{
    __o->notify_one();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(volatile atomic_flag* __o) _NOEXCEPT
{
    __o->notify_all();
}

inline _LIBCPP_INLINE_VISIBILITY _LIBCPP_AVAILABILITY_SYNC
void
atomic_flag_notify_all(atomic_flag* __o) _NOEXCEPT
{
    __o->notify_all();
}

// fences

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_thread_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_thread_fence(__m);
}

inline _LIBCPP_INLINE_VISIBILITY
void
atomic_signal_fence(memory_order __m) _NOEXCEPT
{
    __cxx_atomic_signal_fence(__m);
}

// Atomics for standard typedef types

typedef atomic<bool>               atomic_bool;
typedef atomic<char>               atomic_char;
typedef atomic<signed char>        atomic_schar;
typedef atomic<unsigned char>      atomic_uchar;
typedef atomic<short>              atomic_short;
typedef atomic<unsigned short>     atomic_ushort;
typedef atomic<int>                atomic_int;
typedef atomic<unsigned int>       atomic_uint;
typedef atomic<long>               atomic_long;
typedef atomic<unsigned long>      atomic_ulong;
typedef atomic<long long>          atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
#ifndef _LIBCPP_HAS_NO_CHAR8_T
typedef atomic<char8_t>            atomic_char8_t;
#endif
typedef atomic<char16_t>           atomic_char16_t;
typedef atomic<char32_t>           atomic_char32_t;
#ifndef _LIBCPP_HAS_NO_WIDE_CHARACTERS
typedef atomic<wchar_t>            atomic_wchar_t;
#endif

typedef atomic<int_least8_t>   atomic_int_least8_t;
typedef atomic<uint_least8_t>  atomic_uint_least8_t;
typedef atomic<int_least16_t>  atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t>  atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t>  atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

typedef atomic<int_fast8_t>   atomic_int_fast8_t;
typedef atomic<uint_fast8_t>  atomic_uint_fast8_t;
typedef atomic<int_fast16_t>  atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t>  atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t>  atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

typedef atomic< int8_t>  atomic_int8_t;
typedef atomic<uint8_t>  atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

typedef atomic<intptr_t>  atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t>    atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t>  atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// atomic_*_lock_free : prefer the contention type most highly, then the largest lock-free type

#ifdef __cpp_lib_atomic_is_always_lock_free
# define _LIBCPP_CONTENTION_LOCK_FREE ::std::__libcpp_is_always_lock_free<__cxx_contention_t>::__value
#else
# define _LIBCPP_CONTENTION_LOCK_FREE false
#endif

#if ATOMIC_LLONG_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, long long>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned long long>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_INT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, int>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned int>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_SHORT_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, short>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned short>::type __libcpp_unsigned_lock_free;
#elif ATOMIC_CHAR_LOCK_FREE == 2
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, char>::type          __libcpp_signed_lock_free;
typedef conditional<_LIBCPP_CONTENTION_LOCK_FREE, __cxx_contention_t, unsigned char>::type __libcpp_unsigned_lock_free;
#else
    // No signed/unsigned lock-free types
#define _LIBCPP_NO_LOCK_FREE_TYPES
#endif

#if !defined(_LIBCPP_NO_LOCK_FREE_TYPES)
typedef atomic<__libcpp_signed_lock_free>   atomic_signed_lock_free;
typedef atomic<__libcpp_unsigned_lock_free> atomic_unsigned_lock_free;
#endif

#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}

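// Editorial note (illustrative, not part of the upstream header): ATOMIC_VAR_INIT
// is deprecated in C++20, which is what the clang deprecation pragma below
// reports; direct initialization is the preferred spelling:
//
//   std::atomic<int> __counter{0};   // instead of: std::atomic<int> __counter = ATOMIC_VAR_INIT(0);
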
#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)
#  if defined(_LIBCPP_CLANG_VER) && _LIBCPP_CLANG_VER >= 1400
#    pragma clang deprecated(ATOMIC_VAR_INIT)
#  endif
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_DISABLE_DEPRECATION_WARNINGS)

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP_ATOMIC