/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

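/*
 * Illustrative sketch (not part of this header; compute() and use() are
 * made-up names): atomic_set_release() pairs with atomic_read_acquire()
 * to publish data. The RELEASE store orders the store to @data before
 * the store to @ready, and the ACQUIRE load orders the load of @data
 * after the load of @ready, so a reader that sees @ready != 0 also sees
 * the published @data.
 *
 *	int data;
 *	atomic_t ready = ATOMIC_INIT(0);
 *
 *	// writer
 *	data = compute();
 *	atomic_set_release(&ready, 1);
 *
 *	// reader
 *	if (atomic_read_acquire(&ready))
 *		use(data);
 */
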
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an arch has a special barrier for acquire/release, it
 * can implement its own __atomic_op_* and use the same framework for
 * building the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

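/*
 * Illustrative sketch: on an architecture that provides only
 * atomic_add_return_relaxed(), the framework above makes
 *
 *	atomic_add_return_acquire(i, v)
 *
 * expand (roughly) to
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * i.e. the relaxed primitive followed by a barrier that upgrades it to
 * ACQUIRE ordering. The fully ordered and RELEASE variants are built the
 * same way from __atomic_op_fence() and __atomic_op_release().
 */
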
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_relaxed	atomic_add_return
#define atomic_add_return_acquire	atomic_add_return
#define atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define atomic_inc_return_relaxed	atomic_inc_return
#define atomic_inc_return_acquire	atomic_inc_return
#define atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_relaxed	atomic_sub_return
#define atomic_sub_return_acquire	atomic_sub_return
#define atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define atomic_dec_return_relaxed	atomic_dec_return
#define atomic_dec_return_acquire	atomic_dec_return
#define atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

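/*
 * Illustrative sketch of the return-value convention: atomic_add_return()
 * returns the value of the counter *after* the addition, while
 * atomic_fetch_add() returns the value it held *before*.
 *
 *	atomic_t v = ATOMIC_INIT(1);
 *
 *	atomic_add_return(2, &v);	// returns 3, v is now 3
 *	atomic_fetch_add(2, &v);	// returns 3, v is now 5
 */
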
/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

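/*
 * Illustrative sketch (made-up @obj and free_obj() names): because
 * atomic_fetch_dec() returns the pre-decrement value, a counter has just
 * dropped to zero exactly when the old value was 1. Real reference
 * counting would normally use refcount_t or release-ordered helpers;
 * this only shows the fetch_* return convention.
 *
 *	if (atomic_fetch_dec(&obj->refs) == 1)
 *		free_obj(obj);
 */
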
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed		atomic_fetch_or
#define atomic_fetch_or_acquire		atomic_fetch_or
#define atomic_fetch_or_release		atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


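/*
 * Illustrative sketch: atomic_fetch_or() implements a one-shot flag; the
 * returned old value tells the caller whether the flag was already set,
 * so exactly one caller wins. @flags and FLAG_INIT are made-up names.
 *
 *	if (!(atomic_fetch_or(FLAG_INIT, &flags) & FLAG_INIT)) {
 *		// first caller: do the one-time initialization
 *	}
 */
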
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_relaxed		atomic_xchg
#define atomic_xchg_acquire		atomic_xchg
#define atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_relaxed		atomic_cmpxchg
#define atomic_cmpxchg_acquire		atomic_cmpxchg
#define atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

/*
 * atomic_try_cmpxchg - cmpxchg that updates the expected value on failure:
 * returns true if *_p was changed from *_po to _n, otherwise stores the
 * current value of *_p in *_po and returns false.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

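/*
 * Illustrative sketch: because atomic_try_cmpxchg() refreshes the
 * expected value on failure, compare-and-swap loops need no explicit
 * re-read, e.g. a saturating increment (made-up context):
 *
 *	int old = atomic_read(&v);
 *	do {
 *		if (old == INT_MAX)
 *			break;			// saturated; give up
 *	} while (!atomic_try_cmpxchg(&v, &old, old + 1));
 */
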
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed			cmpxchg
#define cmpxchg_acquire			cmpxchg
#define cmpxchg_release			cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed		cmpxchg64
#define cmpxchg64_acquire		cmpxchg64
#define cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define xchg_relaxed			xchg
#define xchg_acquire			xchg
#define xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

/*
 * atomic_inc_unless_negative - increment @p by 1, so long as its old value
 * is not negative. Returns true if the increment was done.
 */
#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

/*
 * atomic_dec_unless_positive - decrement @p by 1, so long as its old value
 * is not positive. Returns true if the decrement was done.
 */
#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * Returns the old value of *v minus 1, even if @v was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif

#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

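/*
 * Illustrative sketch: atomic_cond_read_acquire() spins (politely, via
 * smp_cond_load_acquire()) until the condition on the freshly loaded
 * value, named VAL inside the expression, becomes true, and returns that
 * value with ACQUIRE ordering. E.g. waiting on a state variable
 * (made-up names):
 *
 *	// wait until @state becomes STATE_READY; afterwards it is safe
 *	// to read whatever the writer published before setting it
 *	atomic_cond_read_acquire(&state, VAL == STATE_READY);
 */
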
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed	atomic64_add_return
#define atomic64_add_return_acquire	atomic64_add_return
#define atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed	atomic64_inc_return
#define atomic64_inc_return_acquire	atomic64_inc_return
#define atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed	atomic64_sub_return
#define atomic64_sub_return_acquire	atomic64_sub_return
#define atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed	atomic64_dec_return
#define atomic64_dec_return_acquire	atomic64_dec_return
#define atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

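/*
 * Illustrative sketch: when only uniqueness matters and no other memory
 * accesses depend on the result, the _relaxed variant is sufficient and
 * cheapest, e.g. a 64-bit ID allocator (made-up names):
 *
 *	static atomic64_t next_id = ATOMIC64_INIT(0);
 *
 *	u64 alloc_id(void)
 *	{
 *		return atomic64_fetch_inc_relaxed(&next_id);
 *	}
 */
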
/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


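/*
 * Illustrative sketch: atomic64_fetch_xor() toggles bits and reports the
 * previous state, e.g. flipping between two buffers (made-up names):
 *
 *	// returns the index that was active *before* the flip
 *	old = atomic64_fetch_xor(1, &active_buf);
 */
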
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed		atomic64_xchg
#define atomic64_xchg_acquire		atomic64_xchg
#define atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

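/*
 * Illustrative sketch: a monotonic high-watermark update built on
 * atomic64_try_cmpxchg(). The loop retries only while another CPU has
 * moved the maximum, and stops as soon as @val is no longer larger
 * (made-up names):
 *
 *	long long old = atomic64_read(&max_seen);
 *	do {
 *		if (val <= old)
 *			break;
 *	} while (!atomic64_try_cmpxchg(&max_seen, &old, val));
 */
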
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */