// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

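/*
 * The same fallback template repeats for each exchange primitive below:
 * if an architecture only provides a fully-ordered op, the _acquire,
 * _release and _relaxed names simply alias it; if it provides a _relaxed
 * op, the stronger orderings are synthesized through the
 * __atomic_op_{acquire,release,fence}() wrappers from <linux/atomic.h>,
 * which pair the relaxed op with the appropriate barrier.
 */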
#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg128_relaxed
#define arch_cmpxchg128_acquire arch_cmpxchg128
#define arch_cmpxchg128_release arch_cmpxchg128
#define arch_cmpxchg128_relaxed arch_cmpxchg128
#else /* arch_cmpxchg128_relaxed */

#ifndef arch_cmpxchg128_acquire
#define arch_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg128_release
#define arch_cmpxchg128_release(...) \
	__atomic_op_release(arch_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg128
#define arch_cmpxchg128(...) \
	__atomic_op_fence(arch_cmpxchg128, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg128_relaxed */

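/*
 * The arch_try_cmpxchg*() fallbacks below are built on the corresponding
 * arch_cmpxchg*(): the statement expression evaluates to true on success,
 * and on failure writes the value actually found back through @_oldp so
 * that callers can retry without reloading.
 */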
#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */

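/*
 * A typical caller wraps try_cmpxchg in a retry loop. A minimal sketch
 * (names here are illustrative only):
 *
 *	int old = READ_ONCE(*ptr);
 *	do {
 *		new = compute(old);
 *	} while (!arch_try_cmpxchg(ptr, &old, new));
 *
 * On failure the current value is already in @old, so the loop needs no
 * extra load before the next attempt.
 */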
#ifndef arch_try_cmpxchg64_relaxed
#ifdef arch_try_cmpxchg64
#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
#define arch_try_cmpxchg64_release arch_try_cmpxchg64
#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_acquire */

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_release */

#ifndef arch_try_cmpxchg64_relaxed
#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_relaxed */

#else /* arch_try_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(...) \
	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(...) \
	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg128_relaxed
#ifdef arch_try_cmpxchg128
#define arch_try_cmpxchg128_acquire arch_try_cmpxchg128
#define arch_try_cmpxchg128_release arch_try_cmpxchg128
#define arch_try_cmpxchg128_relaxed arch_try_cmpxchg128
#endif /* arch_try_cmpxchg128 */

#ifndef arch_try_cmpxchg128
#define arch_try_cmpxchg128(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128 */

#ifndef arch_try_cmpxchg128_acquire
#define arch_try_cmpxchg128_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_acquire */

#ifndef arch_try_cmpxchg128_release
#define arch_try_cmpxchg128_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_release */

#ifndef arch_try_cmpxchg128_relaxed
#define arch_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_relaxed */

#else /* arch_try_cmpxchg128_relaxed */

#ifndef arch_try_cmpxchg128_acquire
#define arch_try_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg128_release
#define arch_try_cmpxchg128_release(...) \
	__atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg128
#define arch_try_cmpxchg128(...) \
	__atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg128_relaxed */

#ifndef arch_try_cmpxchg_local
#define arch_try_cmpxchg_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_local */

#ifndef arch_try_cmpxchg64_local
#define arch_try_cmpxchg64_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_local */

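/*
 * From here on the generated fallbacks cover the atomic_t API itself.
 * arch_atomic_read_acquire() and arch_atomic_set_release() use
 * smp_load_acquire()/smp_store_release() when atomic_t is a native
 * machine word, and otherwise combine the plain op with an explicit
 * acquire/release fence.
 */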
#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic_set(v, i);
	}
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */

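/*
 * The inc/dec operations are derived helpers: unless the architecture
 * supplies its own versions, they fall back to the add/sub fallbacks
 * above with a constant operand of 1.
 */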
#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */

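/*
 * The bitwise fetch ops follow the same ordering template. Note that
 * andnot is expressed as and(~i, v) whenever the architecture provides
 * no dedicated arch_atomic_fetch_andnot*() of its own.
 */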
#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */

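/*
 * Where the architecture supplies no arch_atomic_{xchg,cmpxchg}*(), the
 * fallbacks operate on the counter member directly via the generic
 * arch_{xchg,cmpxchg}*() defined at the top of this file.
 */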
#ifndef arch_atomic_xchg_relaxed
#ifdef arch_atomic_xchg
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#endif /* arch_atomic_xchg */

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int new)
{
	return arch_xchg_acquire(&v->counter, new);
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int new)
{
	return arch_xchg_release(&v->counter, new);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg_relaxed
static __always_inline int
arch_atomic_xchg_relaxed(atomic_t *v, int new)
{
	return arch_xchg_relaxed(&v->counter, new);
}
#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
#endif

#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#ifdef arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#endif /* arch_atomic_cmpxchg */

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	return arch_cmpxchg_acquire(&v->counter, old, new);
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	return arch_cmpxchg_release(&v->counter, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg_relaxed
static __always_inline int
arch_atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	return arch_cmpxchg_relaxed(&v->counter, old, new);
}
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
#endif

#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */

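/*
 * The *_and_test() and add_negative() helpers below are predicates over
 * the corresponding *_return() results: whether the operation left the
 * counter at zero, or made it negative.
 */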
#ifndef arch_atomic_sub_and_test
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative_relaxed
#ifdef arch_atomic_add_negative
#define arch_atomic_add_negative_acquire arch_atomic_add_negative
#define arch_atomic_add_negative_release arch_atomic_add_negative
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative
#endif /* arch_atomic_add_negative */

#ifndef arch_atomic_add_negative
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_add_negative_acquire
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	return arch_atomic_add_return_acquire(i, v) < 0;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	return arch_atomic_add_return_release(i, v) < 0;
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative_relaxed
static __always_inline bool
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
{
	return arch_atomic_add_return_relaxed(i, v) < 0;
}
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
#endif

#else /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_add_negative_acquire
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	bool ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_negative_relaxed(i, v);
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#endif /* arch_atomic_add_negative_relaxed */

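/*
 * The conditional helpers that follow (fetch_add_unless, inc_not_zero,
 * inc_unless_negative, dec_unless_positive, dec_if_positive) share one
 * idiom: read the counter once, then loop on try_cmpxchg(), which
 * refreshes the expected value on every failed attempt.
 */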
#ifndef arch_atomic_fetch_add_unless
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

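/*
 * The atomic64_t fallbacks below mirror the atomic_t ones one-for-one,
 * with int replaced by s64. Architectures without native 64-bit atomics
 * select CONFIG_GENERIC_ATOMIC64 and take the generic implementation
 * from <asm-generic/atomic64.h> instead.
 */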
#ifndef arch_atomic64_read_acquire
static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t *v)
{
	s64 ret;

	if (__native_word(atomic64_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic64_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic64_read_acquire arch_atomic64_read_acquire
#endif

#ifndef arch_atomic64_set_release
static __always_inline void
arch_atomic64_set_release(atomic64_t *v, s64 i)
{
	if (__native_word(atomic64_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic64_set(v, i);
	}
}
#define arch_atomic64_set_release arch_atomic64_set_release
#endif

#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */
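
/*
 * arch_atomic64_fetch_dec() follows the same two-branch scheme: all
 * orderings derive from arch_atomic64_fetch_sub() when no _relaxed form
 * exists, and are composed with fences around the _relaxed form when it
 * does.
 */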

#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif
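
/*
 * andnot atomically clears the bits set in @i by complementing the mask
 * and reusing the AND op. Illustrative use (hypothetical mask and
 * variable):
 *
 *	arch_atomic64_andnot(0xff, &v);		// == arch_atomic64_and(~0xff, &v)
 */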

#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */
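
/*
 * Unlike the inc/dec helpers, the bitwise fetch ops have no arithmetic
 * fallback of their own: when fetch_or (above) or fetch_xor (below)
 * lacks a _relaxed form, all ordering variants simply alias the
 * architecture's fully-ordered implementation, which must therefore
 * exist.
 */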

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#ifdef arch_atomic64_xchg
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#endif /* arch_atomic64_xchg */

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 new)
{
	return arch_xchg_acquire(&v->counter, new);
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 new)
{
	return arch_xchg_release(&v->counter, new);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg_relaxed
static __always_inline s64
arch_atomic64_xchg_relaxed(atomic64_t *v, s64 new)
{
	return arch_xchg_relaxed(&v->counter, new);
}
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg_relaxed
#endif

#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
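
/*
 * With no atomic64-specific xchg, the generic arch_xchg() family is
 * applied directly to v->counter, as above; cmpxchg below delegates to
 * arch_cmpxchg() the same way. Sketch of a caller (illustrative only;
 * kernel code normally uses the instrumented atomic64_*() wrappers
 * rather than these arch_*() forms):
 *
 *	atomic64_t v = ATOMIC64_INIT(0);
 *	s64 seen = arch_atomic64_cmpxchg(&v, 0, 1);
 *	// seen == 0 iff the swap to 1 succeeded
 */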

#ifndef arch_atomic64_cmpxchg_relaxed
#ifdef arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#endif /* arch_atomic64_cmpxchg */

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg_acquire(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg_release(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg_relaxed
static __always_inline s64
arch_atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	return arch_cmpxchg_relaxed(&v->counter, old, new);
}
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg_relaxed
#endif

#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
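
/*
 * try_cmpxchg is the preferred shape for cmpxchg loops: on failure it
 * stores the value actually observed back through @old and returns
 * false, saving a re-read in the caller. A minimal sketch, assuming a
 * counter that must not exceed "limit" (names are illustrative):
 *
 *	s64 c = arch_atomic64_read(v);
 *	do {
 *		if (c >= limit)
 *			return false;
 *	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
 *	return true;
 */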

#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_sub_and_test
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif
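
/*
 * The *_and_test helpers above return true only when the resulting
 * value is zero; add_negative below instead reports whether the result
 * went negative. Both are thin predicates over the *_return forms,
 * e.g. (hypothetical refcount and free function):
 *
 *	if (arch_atomic64_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */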

#ifndef arch_atomic64_add_negative_relaxed
#ifdef arch_atomic64_add_negative
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
#define arch_atomic64_add_negative_release arch_atomic64_add_negative
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
#endif /* arch_atomic64_add_negative */

#ifndef arch_atomic64_add_negative
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#ifndef arch_atomic64_add_negative_acquire
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(i, v) < 0;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_release(i, v) < 0;
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative_relaxed
static __always_inline bool
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v) < 0;
}
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
#endif

#else /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_add_negative_acquire
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	bool ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_negative_relaxed(i, v);
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#endif /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_fetch_add_unless
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif

#ifndef arch_atomic64_add_unless
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
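
/*
 * arch_atomic64_fetch_add_unless() above is the canonical
 * read/try_cmpxchg retry loop: read once, bail out if the forbidden
 * value @u is seen, otherwise retry until the add lands.
 * inc_unless_negative and dec_unless_positive below apply the same loop
 * with a sign check, and dec_if_positive returns the decremented value,
 * which is negative exactly when no decrement took place.
 */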

#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// e1cee558cc61cae887890db30fcdf93baca9f498