// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

/*
 * atomic_long_t is a machine-word-sized atomic type: it aliases atomic64_t
 * on 64-bit kernels and atomic_t on 32-bit kernels.  Every
 * raw_atomic_long_*() operation below is a trivial forwarder to the
 * corresponding raw_atomic64_*() or raw_atomic_*() operation for the
 * configured word size.
 *
 * Ordering suffixes follow the usual kernel atomics convention:
 * unsuffixed RMW ops are fully ordered, _acquire/_release provide the
 * respective one-way barriers, and _relaxed provides no ordering.
 */

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif

#ifdef CONFIG_64BIT

/* long is 64-bit: forward every operation to the raw_atomic64_*() family. */

static __always_inline long
raw_atomic_long_read(const atomic_long_t *v)
{
	return raw_atomic64_read(v);
}

static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t *v)
{
	return raw_atomic64_read_acquire(v);
}

static __always_inline void
raw_atomic_long_set(atomic_long_t *v, long i)
{
	raw_atomic64_set(v, i);
}

static __always_inline void
raw_atomic_long_set_release(atomic_long_t *v, long i)
{
	raw_atomic64_set_release(v, i);
}

static __always_inline void
raw_atomic_long_add(long i, atomic_long_t *v)
{
	raw_atomic64_add(i, v);
}

static __always_inline long
raw_atomic_long_add_return(long i, atomic_long_t *v)
{
	return raw_atomic64_add_return(i, v);
}

static __always_inline long
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_add_return_acquire(i, v);
}

static __always_inline long
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return raw_atomic64_add_return_release(i, v);
}

static __always_inline long
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_add_return_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_add(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_add_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_sub(long i, atomic_long_t *v)
{
	raw_atomic64_sub(i, v);
}

static __always_inline long
raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return raw_atomic64_sub_return(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_sub_return_acquire(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return raw_atomic64_sub_return_release(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_sub(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_sub_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_inc(atomic_long_t *v)
{
	raw_atomic64_inc(v);
}

static __always_inline long
raw_atomic_long_inc_return(atomic_long_t *v)
{
	return raw_atomic64_inc_return(v);
}

static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return raw_atomic64_inc_return_acquire(v);
}

static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t *v)
{
	return raw_atomic64_inc_return_release(v);
}

static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return raw_atomic64_inc_return_relaxed(v);
}

static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t *v)
{
	return raw_atomic64_fetch_inc(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return raw_atomic64_fetch_inc_acquire(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return raw_atomic64_fetch_inc_release(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return raw_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
raw_atomic_long_dec(atomic_long_t *v)
{
	raw_atomic64_dec(v);
}

static __always_inline long
raw_atomic_long_dec_return(atomic_long_t *v)
{
	return raw_atomic64_dec_return(v);
}

static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return raw_atomic64_dec_return_acquire(v);
}

static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t *v)
{
	return raw_atomic64_dec_return_release(v);
}

static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return raw_atomic64_dec_return_relaxed(v);
}

static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t *v)
{
	return raw_atomic64_fetch_dec(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return raw_atomic64_fetch_dec_acquire(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return raw_atomic64_fetch_dec_release(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return raw_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
raw_atomic_long_and(long i, atomic_long_t *v)
{
	raw_atomic64_and(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_and(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_and_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
	raw_atomic64_andnot(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_andnot(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
	raw_atomic64_or(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_or(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_or_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
	raw_atomic64_xor(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_xor(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_xor_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long i)
{
	return raw_atomic64_xchg(v, i);
}

static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return raw_atomic64_xchg_acquire(v, i);
}

static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return raw_atomic64_xchg_release(v, i);
}

static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return raw_atomic64_xchg_relaxed(v, i);
}

static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return raw_atomic64_cmpxchg(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return raw_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return raw_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
}

/*
 * try_cmpxchg takes @old by reference and updates it on failure; long and
 * s64 have the same size and representation on 64-bit kernels, so the
 * pointer cast below is safe.
 */

static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}

static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return raw_atomic64_sub_and_test(i, v);
}

static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
	return raw_atomic64_dec_and_test(v);
}

static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
	return raw_atomic64_inc_and_test(v);
}

static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return raw_atomic64_add_negative(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return raw_atomic64_add_negative_acquire(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return raw_atomic64_add_negative_release(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic64_add_negative_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return raw_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return raw_atomic64_add_unless(v, a, u);
}

static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return raw_atomic64_inc_not_zero(v);
}

static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return raw_atomic64_inc_unless_negative(v);
}

static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return raw_atomic64_dec_unless_positive(v);
}

static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return raw_atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */

/* long is 32-bit: forward every operation to the raw_atomic_*() family. */

static __always_inline long
raw_atomic_long_read(const atomic_long_t *v)
{
	return raw_atomic_read(v);
}

static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t *v)
{
	return raw_atomic_read_acquire(v);
}

static __always_inline void
raw_atomic_long_set(atomic_long_t *v, long i)
{
	raw_atomic_set(v, i);
}

static __always_inline void
raw_atomic_long_set_release(atomic_long_t *v, long i)
{
	raw_atomic_set_release(v, i);
}

static __always_inline void
raw_atomic_long_add(long i, atomic_long_t *v)
{
	raw_atomic_add(i, v);
}

static __always_inline long
raw_atomic_long_add_return(long i, atomic_long_t *v)
{
	return raw_atomic_add_return(i, v);
}

static __always_inline long
raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_add_return_acquire(i, v);
}

static __always_inline long
raw_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return raw_atomic_add_return_release(i, v);
}

static __always_inline long
raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_add_return_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_add(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_add_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_add_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_sub(long i, atomic_long_t *v)
{
	raw_atomic_sub(i, v);
}

static __always_inline long
raw_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return raw_atomic_sub_return(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_sub_return_acquire(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return raw_atomic_sub_return_release(i, v);
}

static __always_inline long
raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_sub_return_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_sub(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_sub_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_inc(atomic_long_t *v)
{
	raw_atomic_inc(v);
}

static __always_inline long
raw_atomic_long_inc_return(atomic_long_t *v)
{
	return raw_atomic_inc_return(v);
}

static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return raw_atomic_inc_return_acquire(v);
}

static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t *v)
{
	return raw_atomic_inc_return_release(v);
}

static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return raw_atomic_inc_return_relaxed(v);
}

static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t *v)
{
	return raw_atomic_fetch_inc(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return raw_atomic_fetch_inc_acquire(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return raw_atomic_fetch_inc_release(v);
}

static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return raw_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
raw_atomic_long_dec(atomic_long_t *v)
{
	raw_atomic_dec(v);
}

static __always_inline long
raw_atomic_long_dec_return(atomic_long_t *v)
{
	return raw_atomic_dec_return(v);
}

static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return raw_atomic_dec_return_acquire(v);
}

static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t *v)
{
	return raw_atomic_dec_return_release(v);
}

static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return raw_atomic_dec_return_relaxed(v);
}

static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t *v)
{
	return raw_atomic_fetch_dec(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return raw_atomic_fetch_dec_acquire(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return raw_atomic_fetch_dec_release(v);
}

static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return raw_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
raw_atomic_long_and(long i, atomic_long_t *v)
{
	raw_atomic_and(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_and(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_and_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_and_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_andnot(long i, atomic_long_t *v)
{
	raw_atomic_andnot(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_andnot(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_andnot_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_or(long i, atomic_long_t *v)
{
	raw_atomic_or(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_or(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_or_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_or_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
raw_atomic_long_xor(long i, atomic_long_t *v)
{
	raw_atomic_xor(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_xor(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_xor_release(i, v);
}

static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long i)
{
	return raw_atomic_xchg(v, i);
}

static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return raw_atomic_xchg_acquire(v, i);
}

static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return raw_atomic_xchg_release(v, i);
}

static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return raw_atomic_xchg_relaxed(v, i);
}

static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return raw_atomic_cmpxchg(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return raw_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return raw_atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return raw_atomic_cmpxchg_relaxed(v, old, new);
}

/*
 * try_cmpxchg takes @old by reference and updates it on failure; long and
 * int have the same size and representation on 32-bit kernels, so the
 * pointer cast below is safe.
 */

static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return raw_atomic_sub_and_test(i, v);
}

static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
	return raw_atomic_dec_and_test(v);
}

static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
	return raw_atomic_inc_and_test(v);
}

static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return raw_atomic_add_negative(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return raw_atomic_add_negative_acquire(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return raw_atomic_add_negative_release(i, v);
}

static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return raw_atomic_add_negative_relaxed(i, v);
}

static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return raw_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return raw_atomic_add_unless(v, a, u);
}

static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return raw_atomic_inc_not_zero(v);
}

static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return raw_atomic_inc_unless_negative(v);
}

static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return raw_atomic_dec_unless_positive(v);
}

static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return raw_atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */
// 108784846d3bbbb201b8dabe621c5dc30b216206