// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */

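/*
 * Note: for each xchg/cmpxchg width the ordering variants are synthesized the
 * same way. If the architecture only provides a fully ordered op, the acquire,
 * release and relaxed names simply alias it; if it provides a relaxed op, the
 * stronger orderings are built by wrapping it in __atomic_op_acquire(),
 * __atomic_op_release() or __atomic_op_fence(). The 128-bit variants below
 * follow the identical scheme.
 */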
#ifndef arch_cmpxchg128_relaxed
#define arch_cmpxchg128_acquire arch_cmpxchg128
#define arch_cmpxchg128_release arch_cmpxchg128
#define arch_cmpxchg128_relaxed arch_cmpxchg128
#else /* arch_cmpxchg128_relaxed */

#ifndef arch_cmpxchg128_acquire
#define arch_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg128_release
#define arch_cmpxchg128_release(...) \
	__atomic_op_release(arch_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg128
#define arch_cmpxchg128(...) \
	__atomic_op_fence(arch_cmpxchg128, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg128_relaxed */

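/*
 * The try_cmpxchg() fallbacks below wrap cmpxchg(): they succeed (and return
 * true) iff *_ptr still held *_oldp, and on failure they update *_oldp with
 * the value actually observed, so the caller can retry without an extra read.
 * A typical retry loop (sketch only; 'v', 'new' and compute() are
 * illustrative names, not part of this header) looks like:
 *
 *	int old = arch_atomic_read(v);
 *	do {
 *		new = compute(old);
 *	} while (!arch_atomic_try_cmpxchg(v, &old, new));
 *
 * arch_atomic_fetch_add_unless() and friends further down use exactly this
 * shape.
 */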
#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg64_relaxed
#ifdef arch_try_cmpxchg64
#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
#define arch_try_cmpxchg64_release arch_try_cmpxchg64
#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_acquire */

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_release */

#ifndef arch_try_cmpxchg64_relaxed
#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_relaxed */

#else /* arch_try_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(...) \
	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(...) \
	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg128_relaxed
#ifdef arch_try_cmpxchg128
#define arch_try_cmpxchg128_acquire arch_try_cmpxchg128
#define arch_try_cmpxchg128_release arch_try_cmpxchg128
#define arch_try_cmpxchg128_relaxed arch_try_cmpxchg128
#endif /* arch_try_cmpxchg128 */

#ifndef arch_try_cmpxchg128
#define arch_try_cmpxchg128(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128 */

#ifndef arch_try_cmpxchg128_acquire
#define arch_try_cmpxchg128_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_acquire */

#ifndef arch_try_cmpxchg128_release
#define arch_try_cmpxchg128_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_release */

#ifndef arch_try_cmpxchg128_relaxed
#define arch_try_cmpxchg128_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg128_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg128_relaxed */

#else /* arch_try_cmpxchg128_relaxed */

#ifndef arch_try_cmpxchg128_acquire
#define arch_try_cmpxchg128_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg128_release
#define arch_try_cmpxchg128_release(...) \
	__atomic_op_release(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg128
#define arch_try_cmpxchg128(...) \
	__atomic_op_fence(arch_try_cmpxchg128, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg128_relaxed */

#ifndef arch_try_cmpxchg_local
#define arch_try_cmpxchg_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_local */

#ifndef arch_try_cmpxchg64_local
#define arch_try_cmpxchg64_local(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_local((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_local */

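/*
 * arch_atomic_read_acquire()/arch_atomic_set_release() below use
 * smp_load_acquire()/smp_store_release() directly on the counter when
 * atomic_t is a native word size; otherwise they combine the plain
 * read/set with an explicit acquire/release fence.
 */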
#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic_set(v, i);
	}
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */

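/*
 * The inc/dec operations (and their _return/_fetch variants in every
 * ordering) fall back to the corresponding add/sub operation with a
 * constant of 1, so they inherit whatever ordering that add/sub provides.
 */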
#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */

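/*
 * The *_and_test() and add_negative() predicates below are defined in terms
 * of the value returned by the corresponding *_return operation, so their
 * ordering is whatever that *_return variant provides.
 */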
#ifndef arch_atomic_sub_and_test
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative_relaxed
#ifdef arch_atomic_add_negative
#define arch_atomic_add_negative_acquire arch_atomic_add_negative
#define arch_atomic_add_negative_release arch_atomic_add_negative
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative
#endif /* arch_atomic_add_negative */

#ifndef arch_atomic_add_negative
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_add_negative_acquire
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	return arch_atomic_add_return_acquire(i, v) < 0;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	return arch_atomic_add_return_release(i, v) < 0;
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative_relaxed
static __always_inline bool
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
{
	return arch_atomic_add_return_relaxed(i, v) < 0;
}
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
#endif

#else /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_add_negative_acquire
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	bool ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_negative_relaxed(i, v);
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#endif /* arch_atomic_add_negative_relaxed */

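/*
 * The conditional operations below (fetch_add_unless, add_unless,
 * inc_not_zero, inc_unless_negative, dec_unless_positive, dec_if_positive)
 * are all built as try_cmpxchg() retry loops: read the current value, bail
 * out if the condition fails, otherwise attempt the update and retry on
 * contention.
 */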
#ifndef arch_atomic_fetch_add_unless
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif

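/*
 * Everything from here on mirrors the atomic_t fallbacks above for
 * atomic64_t / s64. CONFIG_GENERIC_ATOMIC64 pulls in the generic atomic64
 * library implementation for architectures without native 64-bit atomics.
 */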
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef arch_atomic64_read_acquire
static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t *v)
{
	s64 ret;

	if (__native_word(atomic64_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic64_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic64_read_acquire arch_atomic64_read_acquire
#endif

#ifndef arch_atomic64_set_release
static __always_inline void
arch_atomic64_set_release(atomic64_t *v, s64 i)
{
	if (__native_word(atomic64_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic64_set(v, i);
	}
}
#define arch_atomic64_set_release arch_atomic64_set_release
#endif

#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */
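
/*
 * Bitwise fetch ops (and/andnot/or/xor): if the architecture defines no
 * _relaxed form, the acquire/release/relaxed names simply alias the fully
 * ordered op; otherwise the ordered forms are built from the _relaxed op
 * plus the appropriate fences. andnot is derived from and with the mask
 * inverted.
 */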
#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */
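
/*
 * The emulation above builds try_cmpxchg on top of cmpxchg: on failure the
 * current value is written back through *old so callers can retry without
 * an extra read. The variants below instead take an architecture-provided
 * arch_atomic64_try_cmpxchg_relaxed() and add the required fences.
 */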
#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_sub_and_test
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative_relaxed
#ifdef arch_atomic64_add_negative
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
#define arch_atomic64_add_negative_release arch_atomic64_add_negative
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
#endif /* arch_atomic64_add_negative */

#ifndef arch_atomic64_add_negative
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#ifndef arch_atomic64_add_negative_acquire
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(i, v) < 0;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_release(i, v) < 0;
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative_relaxed
static __always_inline bool
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v) < 0;
}
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
#endif

#else /* arch_atomic64_add_negative_relaxed */
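
/*
 * With an architecture-provided arch_atomic64_add_negative_relaxed(), the
 * acquire/release/fully-ordered variants below are again derived by adding
 * the corresponding fences.
 */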
#ifndef arch_atomic64_add_negative_acquire
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	bool ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_negative_relaxed(i, v);
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#endif /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_fetch_add_unless
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif

#ifndef arch_atomic64_add_unless
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif

#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 9f0fd6ed53267c6ec64e36cd18e6fd8df57ea277