1 // SPDX-License-Identifier: GPL-2.0
2 
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
5 
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
8 
9 #include <linux/compiler.h>
10 
11 #ifndef arch_xchg_relaxed
12 #define arch_xchg_acquire arch_xchg
13 #define arch_xchg_release arch_xchg
14 #define arch_xchg_relaxed arch_xchg
15 #else /* arch_xchg_relaxed */
16 
17 #ifndef arch_xchg_acquire
18 #define arch_xchg_acquire(...) \
19 	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
20 #endif
21 
22 #ifndef arch_xchg_release
23 #define arch_xchg_release(...) \
24 	__atomic_op_release(arch_xchg, __VA_ARGS__)
25 #endif
26 
27 #ifndef arch_xchg
28 #define arch_xchg(...) \
29 	__atomic_op_fence(arch_xchg, __VA_ARGS__)
30 #endif
31 
32 #endif /* arch_xchg_relaxed */
33 
34 #ifndef arch_cmpxchg_relaxed
35 #define arch_cmpxchg_acquire arch_cmpxchg
36 #define arch_cmpxchg_release arch_cmpxchg
37 #define arch_cmpxchg_relaxed arch_cmpxchg
38 #else /* arch_cmpxchg_relaxed */
39 
40 #ifndef arch_cmpxchg_acquire
41 #define arch_cmpxchg_acquire(...) \
42 	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
43 #endif
44 
45 #ifndef arch_cmpxchg_release
46 #define arch_cmpxchg_release(...) \
47 	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
48 #endif
49 
50 #ifndef arch_cmpxchg
51 #define arch_cmpxchg(...) \
52 	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
53 #endif
54 
55 #endif /* arch_cmpxchg_relaxed */
56 
57 #ifndef arch_cmpxchg64_relaxed
58 #define arch_cmpxchg64_acquire arch_cmpxchg64
59 #define arch_cmpxchg64_release arch_cmpxchg64
60 #define arch_cmpxchg64_relaxed arch_cmpxchg64
61 #else /* arch_cmpxchg64_relaxed */
62 
63 #ifndef arch_cmpxchg64_acquire
64 #define arch_cmpxchg64_acquire(...) \
65 	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
66 #endif
67 
68 #ifndef arch_cmpxchg64_release
69 #define arch_cmpxchg64_release(...) \
70 	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
71 #endif
72 
73 #ifndef arch_cmpxchg64
74 #define arch_cmpxchg64(...) \
75 	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
76 #endif
77 
78 #endif /* arch_cmpxchg64_relaxed */
79 
80 #ifndef arch_try_cmpxchg_relaxed
81 #ifdef arch_try_cmpxchg
82 #define arch_try_cmpxchg_acquire arch_try_cmpxchg
83 #define arch_try_cmpxchg_release arch_try_cmpxchg
84 #define arch_try_cmpxchg_relaxed arch_try_cmpxchg
85 #endif /* arch_try_cmpxchg */
86 
87 #ifndef arch_try_cmpxchg
88 #define arch_try_cmpxchg(_ptr, _oldp, _new) \
89 ({ \
90 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
91 	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
92 	if (unlikely(___r != ___o)) \
93 		*___op = ___r; \
94 	likely(___r == ___o); \
95 })
96 #endif /* arch_try_cmpxchg */
97 
98 #ifndef arch_try_cmpxchg_acquire
99 #define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
100 ({ \
101 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
102 	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
103 	if (unlikely(___r != ___o)) \
104 		*___op = ___r; \
105 	likely(___r == ___o); \
106 })
107 #endif /* arch_try_cmpxchg_acquire */
108 
109 #ifndef arch_try_cmpxchg_release
110 #define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
111 ({ \
112 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
113 	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
114 	if (unlikely(___r != ___o)) \
115 		*___op = ___r; \
116 	likely(___r == ___o); \
117 })
118 #endif /* arch_try_cmpxchg_release */
119 
120 #ifndef arch_try_cmpxchg_relaxed
121 #define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
122 ({ \
123 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
124 	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
125 	if (unlikely(___r != ___o)) \
126 		*___op = ___r; \
127 	likely(___r == ___o); \
128 })
129 #endif /* arch_try_cmpxchg_relaxed */
130 
131 #else /* arch_try_cmpxchg_relaxed */
132 
133 #ifndef arch_try_cmpxchg_acquire
134 #define arch_try_cmpxchg_acquire(...) \
135 	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
136 #endif
137 
138 #ifndef arch_try_cmpxchg_release
139 #define arch_try_cmpxchg_release(...) \
140 	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
141 #endif
142 
143 #ifndef arch_try_cmpxchg
144 #define arch_try_cmpxchg(...) \
145 	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
146 #endif
147 
148 #endif /* arch_try_cmpxchg_relaxed */
149 
150 #ifndef arch_try_cmpxchg64_relaxed
151 #ifdef arch_try_cmpxchg64
152 #define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
153 #define arch_try_cmpxchg64_release arch_try_cmpxchg64
154 #define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
155 #endif /* arch_try_cmpxchg64 */
156 
157 #ifndef arch_try_cmpxchg64
158 #define arch_try_cmpxchg64(_ptr, _oldp, _new) \
159 ({ \
160 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
161 	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
162 	if (unlikely(___r != ___o)) \
163 		*___op = ___r; \
164 	likely(___r == ___o); \
165 })
166 #endif /* arch_try_cmpxchg64 */
167 
168 #ifndef arch_try_cmpxchg64_acquire
169 #define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
170 ({ \
171 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
172 	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
173 	if (unlikely(___r != ___o)) \
174 		*___op = ___r; \
175 	likely(___r == ___o); \
176 })
177 #endif /* arch_try_cmpxchg64_acquire */
178 
179 #ifndef arch_try_cmpxchg64_release
180 #define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
181 ({ \
182 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
183 	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
184 	if (unlikely(___r != ___o)) \
185 		*___op = ___r; \
186 	likely(___r == ___o); \
187 })
188 #endif /* arch_try_cmpxchg64_release */
189 
190 #ifndef arch_try_cmpxchg64_relaxed
191 #define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
192 ({ \
193 	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
194 	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
195 	if (unlikely(___r != ___o)) \
196 		*___op = ___r; \
197 	likely(___r == ___o); \
198 })
199 #endif /* arch_try_cmpxchg64_relaxed */
200 
201 #else /* arch_try_cmpxchg64_relaxed */
202 
203 #ifndef arch_try_cmpxchg64_acquire
204 #define arch_try_cmpxchg64_acquire(...) \
205 	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
206 #endif
207 
208 #ifndef arch_try_cmpxchg64_release
209 #define arch_try_cmpxchg64_release(...) \
210 	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
211 #endif
212 
213 #ifndef arch_try_cmpxchg64
214 #define arch_try_cmpxchg64(...) \
215 	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
216 #endif
217 
218 #endif /* arch_try_cmpxchg64_relaxed */
219 
220 #ifndef arch_atomic_read_acquire
221 static __always_inline int
222 arch_atomic_read_acquire(const atomic_t *v)
223 {
224 	int ret;
225 
226 	if (__native_word(atomic_t)) {
227 		ret = smp_load_acquire(&(v)->counter);
228 	} else {
229 		ret = arch_atomic_read(v);
230 		__atomic_acquire_fence();
231 	}
232 
233 	return ret;
234 }
235 #define arch_atomic_read_acquire arch_atomic_read_acquire
236 #endif
237 
238 #ifndef arch_atomic_set_release
239 static __always_inline void
240 arch_atomic_set_release(atomic_t *v, int i)
241 {
242 	if (__native_word(atomic_t)) {
243 		smp_store_release(&(v)->counter, i);
244 	} else {
245 		__atomic_release_fence();
246 		arch_atomic_set(v, i);
247 	}
248 }
249 #define arch_atomic_set_release arch_atomic_set_release
250 #endif
251 
252 #ifndef arch_atomic_add_return_relaxed
253 #define arch_atomic_add_return_acquire arch_atomic_add_return
254 #define arch_atomic_add_return_release arch_atomic_add_return
255 #define arch_atomic_add_return_relaxed arch_atomic_add_return
256 #else /* arch_atomic_add_return_relaxed */
257 
258 #ifndef arch_atomic_add_return_acquire
259 static __always_inline int
260 arch_atomic_add_return_acquire(int i, atomic_t *v)
261 {
262 	int ret = arch_atomic_add_return_relaxed(i, v);
263 	__atomic_acquire_fence();
264 	return ret;
265 }
266 #define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
267 #endif
268 
269 #ifndef arch_atomic_add_return_release
270 static __always_inline int
271 arch_atomic_add_return_release(int i, atomic_t *v)
272 {
273 	__atomic_release_fence();
274 	return arch_atomic_add_return_relaxed(i, v);
275 }
276 #define arch_atomic_add_return_release arch_atomic_add_return_release
277 #endif
278 
279 #ifndef arch_atomic_add_return
280 static __always_inline int
281 arch_atomic_add_return(int i, atomic_t *v)
282 {
283 	int ret;
284 	__atomic_pre_full_fence();
285 	ret = arch_atomic_add_return_relaxed(i, v);
286 	__atomic_post_full_fence();
287 	return ret;
288 }
289 #define arch_atomic_add_return arch_atomic_add_return
290 #endif
291 
292 #endif /* arch_atomic_add_return_relaxed */
293 
294 #ifndef arch_atomic_fetch_add_relaxed
295 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
296 #define arch_atomic_fetch_add_release arch_atomic_fetch_add
297 #define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
298 #else /* arch_atomic_fetch_add_relaxed */
299 
300 #ifndef arch_atomic_fetch_add_acquire
301 static __always_inline int
302 arch_atomic_fetch_add_acquire(int i, atomic_t *v)
303 {
304 	int ret = arch_atomic_fetch_add_relaxed(i, v);
305 	__atomic_acquire_fence();
306 	return ret;
307 }
308 #define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
309 #endif
310 
311 #ifndef arch_atomic_fetch_add_release
312 static __always_inline int
313 arch_atomic_fetch_add_release(int i, atomic_t *v)
314 {
315 	__atomic_release_fence();
316 	return arch_atomic_fetch_add_relaxed(i, v);
317 }
318 #define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
319 #endif
320 
321 #ifndef arch_atomic_fetch_add
322 static __always_inline int
323 arch_atomic_fetch_add(int i, atomic_t *v)
324 {
325 	int ret;
326 	__atomic_pre_full_fence();
327 	ret = arch_atomic_fetch_add_relaxed(i, v);
328 	__atomic_post_full_fence();
329 	return ret;
330 }
331 #define arch_atomic_fetch_add arch_atomic_fetch_add
332 #endif
333 
334 #endif /* arch_atomic_fetch_add_relaxed */
335 
336 #ifndef arch_atomic_sub_return_relaxed
337 #define arch_atomic_sub_return_acquire arch_atomic_sub_return
338 #define arch_atomic_sub_return_release arch_atomic_sub_return
339 #define arch_atomic_sub_return_relaxed arch_atomic_sub_return
340 #else /* arch_atomic_sub_return_relaxed */
341 
342 #ifndef arch_atomic_sub_return_acquire
343 static __always_inline int
344 arch_atomic_sub_return_acquire(int i, atomic_t *v)
345 {
346 	int ret = arch_atomic_sub_return_relaxed(i, v);
347 	__atomic_acquire_fence();
348 	return ret;
349 }
350 #define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
351 #endif
352 
353 #ifndef arch_atomic_sub_return_release
354 static __always_inline int
355 arch_atomic_sub_return_release(int i, atomic_t *v)
356 {
357 	__atomic_release_fence();
358 	return arch_atomic_sub_return_relaxed(i, v);
359 }
360 #define arch_atomic_sub_return_release arch_atomic_sub_return_release
361 #endif
362 
363 #ifndef arch_atomic_sub_return
364 static __always_inline int
365 arch_atomic_sub_return(int i, atomic_t *v)
366 {
367 	int ret;
368 	__atomic_pre_full_fence();
369 	ret = arch_atomic_sub_return_relaxed(i, v);
370 	__atomic_post_full_fence();
371 	return ret;
372 }
373 #define arch_atomic_sub_return arch_atomic_sub_return
374 #endif
375 
376 #endif /* arch_atomic_sub_return_relaxed */
377 
378 #ifndef arch_atomic_fetch_sub_relaxed
379 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
380 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
381 #define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
382 #else /* arch_atomic_fetch_sub_relaxed */
383 
384 #ifndef arch_atomic_fetch_sub_acquire
385 static __always_inline int
386 arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
387 {
388 	int ret = arch_atomic_fetch_sub_relaxed(i, v);
389 	__atomic_acquire_fence();
390 	return ret;
391 }
392 #define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
393 #endif
394 
395 #ifndef arch_atomic_fetch_sub_release
396 static __always_inline int
397 arch_atomic_fetch_sub_release(int i, atomic_t *v)
398 {
399 	__atomic_release_fence();
400 	return arch_atomic_fetch_sub_relaxed(i, v);
401 }
402 #define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
403 #endif
404 
405 #ifndef arch_atomic_fetch_sub
406 static __always_inline int
407 arch_atomic_fetch_sub(int i, atomic_t *v)
408 {
409 	int ret;
410 	__atomic_pre_full_fence();
411 	ret = arch_atomic_fetch_sub_relaxed(i, v);
412 	__atomic_post_full_fence();
413 	return ret;
414 }
415 #define arch_atomic_fetch_sub arch_atomic_fetch_sub
416 #endif
417 
418 #endif /* arch_atomic_fetch_sub_relaxed */
419 
420 #ifndef arch_atomic_inc
421 static __always_inline void
422 arch_atomic_inc(atomic_t *v)
423 {
424 	arch_atomic_add(1, v);
425 }
426 #define arch_atomic_inc arch_atomic_inc
427 #endif
428 
429 #ifndef arch_atomic_inc_return_relaxed
430 #ifdef arch_atomic_inc_return
431 #define arch_atomic_inc_return_acquire arch_atomic_inc_return
432 #define arch_atomic_inc_return_release arch_atomic_inc_return
433 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return
434 #endif /* arch_atomic_inc_return */
435 
436 #ifndef arch_atomic_inc_return
437 static __always_inline int
438 arch_atomic_inc_return(atomic_t *v)
439 {
440 	return arch_atomic_add_return(1, v);
441 }
442 #define arch_atomic_inc_return arch_atomic_inc_return
443 #endif
444 
445 #ifndef arch_atomic_inc_return_acquire
446 static __always_inline int
447 arch_atomic_inc_return_acquire(atomic_t *v)
448 {
449 	return arch_atomic_add_return_acquire(1, v);
450 }
451 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
452 #endif
453 
454 #ifndef arch_atomic_inc_return_release
455 static __always_inline int
456 arch_atomic_inc_return_release(atomic_t *v)
457 {
458 	return arch_atomic_add_return_release(1, v);
459 }
460 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
461 #endif
462 
463 #ifndef arch_atomic_inc_return_relaxed
464 static __always_inline int
465 arch_atomic_inc_return_relaxed(atomic_t *v)
466 {
467 	return arch_atomic_add_return_relaxed(1, v);
468 }
469 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
470 #endif
471 
472 #else /* arch_atomic_inc_return_relaxed */
473 
474 #ifndef arch_atomic_inc_return_acquire
475 static __always_inline int
476 arch_atomic_inc_return_acquire(atomic_t *v)
477 {
478 	int ret = arch_atomic_inc_return_relaxed(v);
479 	__atomic_acquire_fence();
480 	return ret;
481 }
482 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
483 #endif
484 
485 #ifndef arch_atomic_inc_return_release
486 static __always_inline int
487 arch_atomic_inc_return_release(atomic_t *v)
488 {
489 	__atomic_release_fence();
490 	return arch_atomic_inc_return_relaxed(v);
491 }
492 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
493 #endif
494 
495 #ifndef arch_atomic_inc_return
496 static __always_inline int
497 arch_atomic_inc_return(atomic_t *v)
498 {
499 	int ret;
500 	__atomic_pre_full_fence();
501 	ret = arch_atomic_inc_return_relaxed(v);
502 	__atomic_post_full_fence();
503 	return ret;
504 }
505 #define arch_atomic_inc_return arch_atomic_inc_return
506 #endif
507 
508 #endif /* arch_atomic_inc_return_relaxed */
509 
510 #ifndef arch_atomic_fetch_inc_relaxed
511 #ifdef arch_atomic_fetch_inc
512 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
513 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
514 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
515 #endif /* arch_atomic_fetch_inc */
516 
517 #ifndef arch_atomic_fetch_inc
518 static __always_inline int
519 arch_atomic_fetch_inc(atomic_t *v)
520 {
521 	return arch_atomic_fetch_add(1, v);
522 }
523 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
524 #endif
525 
526 #ifndef arch_atomic_fetch_inc_acquire
527 static __always_inline int
528 arch_atomic_fetch_inc_acquire(atomic_t *v)
529 {
530 	return arch_atomic_fetch_add_acquire(1, v);
531 }
532 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
533 #endif
534 
535 #ifndef arch_atomic_fetch_inc_release
536 static __always_inline int
537 arch_atomic_fetch_inc_release(atomic_t *v)
538 {
539 	return arch_atomic_fetch_add_release(1, v);
540 }
541 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
542 #endif
543 
544 #ifndef arch_atomic_fetch_inc_relaxed
545 static __always_inline int
546 arch_atomic_fetch_inc_relaxed(atomic_t *v)
547 {
548 	return arch_atomic_fetch_add_relaxed(1, v);
549 }
550 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
551 #endif
552 
553 #else /* arch_atomic_fetch_inc_relaxed */
554 
555 #ifndef arch_atomic_fetch_inc_acquire
556 static __always_inline int
557 arch_atomic_fetch_inc_acquire(atomic_t *v)
558 {
559 	int ret = arch_atomic_fetch_inc_relaxed(v);
560 	__atomic_acquire_fence();
561 	return ret;
562 }
563 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
564 #endif
565 
566 #ifndef arch_atomic_fetch_inc_release
567 static __always_inline int
568 arch_atomic_fetch_inc_release(atomic_t *v)
569 {
570 	__atomic_release_fence();
571 	return arch_atomic_fetch_inc_relaxed(v);
572 }
573 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
574 #endif
575 
576 #ifndef arch_atomic_fetch_inc
577 static __always_inline int
578 arch_atomic_fetch_inc(atomic_t *v)
579 {
580 	int ret;
581 	__atomic_pre_full_fence();
582 	ret = arch_atomic_fetch_inc_relaxed(v);
583 	__atomic_post_full_fence();
584 	return ret;
585 }
586 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
587 #endif
588 
589 #endif /* arch_atomic_fetch_inc_relaxed */
590 
591 #ifndef arch_atomic_dec
592 static __always_inline void
593 arch_atomic_dec(atomic_t *v)
594 {
595 	arch_atomic_sub(1, v);
596 }
597 #define arch_atomic_dec arch_atomic_dec
598 #endif
599 
600 #ifndef arch_atomic_dec_return_relaxed
601 #ifdef arch_atomic_dec_return
602 #define arch_atomic_dec_return_acquire arch_atomic_dec_return
603 #define arch_atomic_dec_return_release arch_atomic_dec_return
604 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return
605 #endif /* arch_atomic_dec_return */
606 
607 #ifndef arch_atomic_dec_return
608 static __always_inline int
609 arch_atomic_dec_return(atomic_t *v)
610 {
611 	return arch_atomic_sub_return(1, v);
612 }
613 #define arch_atomic_dec_return arch_atomic_dec_return
614 #endif
615 
616 #ifndef arch_atomic_dec_return_acquire
617 static __always_inline int
618 arch_atomic_dec_return_acquire(atomic_t *v)
619 {
620 	return arch_atomic_sub_return_acquire(1, v);
621 }
622 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
623 #endif
624 
625 #ifndef arch_atomic_dec_return_release
626 static __always_inline int
627 arch_atomic_dec_return_release(atomic_t *v)
628 {
629 	return arch_atomic_sub_return_release(1, v);
630 }
631 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
632 #endif
633 
634 #ifndef arch_atomic_dec_return_relaxed
635 static __always_inline int
636 arch_atomic_dec_return_relaxed(atomic_t *v)
637 {
638 	return arch_atomic_sub_return_relaxed(1, v);
639 }
640 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
641 #endif
642 
643 #else /* arch_atomic_dec_return_relaxed */
644 
645 #ifndef arch_atomic_dec_return_acquire
646 static __always_inline int
647 arch_atomic_dec_return_acquire(atomic_t *v)
648 {
649 	int ret = arch_atomic_dec_return_relaxed(v);
650 	__atomic_acquire_fence();
651 	return ret;
652 }
653 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
654 #endif
655 
656 #ifndef arch_atomic_dec_return_release
657 static __always_inline int
658 arch_atomic_dec_return_release(atomic_t *v)
659 {
660 	__atomic_release_fence();
661 	return arch_atomic_dec_return_relaxed(v);
662 }
663 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
664 #endif
665 
666 #ifndef arch_atomic_dec_return
667 static __always_inline int
668 arch_atomic_dec_return(atomic_t *v)
669 {
670 	int ret;
671 	__atomic_pre_full_fence();
672 	ret = arch_atomic_dec_return_relaxed(v);
673 	__atomic_post_full_fence();
674 	return ret;
675 }
676 #define arch_atomic_dec_return arch_atomic_dec_return
677 #endif
678 
679 #endif /* arch_atomic_dec_return_relaxed */
680 
681 #ifndef arch_atomic_fetch_dec_relaxed
682 #ifdef arch_atomic_fetch_dec
683 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
684 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
685 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
686 #endif /* arch_atomic_fetch_dec */
687 
688 #ifndef arch_atomic_fetch_dec
689 static __always_inline int
690 arch_atomic_fetch_dec(atomic_t *v)
691 {
692 	return arch_atomic_fetch_sub(1, v);
693 }
694 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
695 #endif
696 
697 #ifndef arch_atomic_fetch_dec_acquire
698 static __always_inline int
699 arch_atomic_fetch_dec_acquire(atomic_t *v)
700 {
701 	return arch_atomic_fetch_sub_acquire(1, v);
702 }
703 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
704 #endif
705 
706 #ifndef arch_atomic_fetch_dec_release
707 static __always_inline int
708 arch_atomic_fetch_dec_release(atomic_t *v)
709 {
710 	return arch_atomic_fetch_sub_release(1, v);
711 }
712 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
713 #endif
714 
715 #ifndef arch_atomic_fetch_dec_relaxed
716 static __always_inline int
717 arch_atomic_fetch_dec_relaxed(atomic_t *v)
718 {
719 	return arch_atomic_fetch_sub_relaxed(1, v);
720 }
721 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
722 #endif
723 
724 #else /* arch_atomic_fetch_dec_relaxed */
725 
726 #ifndef arch_atomic_fetch_dec_acquire
727 static __always_inline int
728 arch_atomic_fetch_dec_acquire(atomic_t *v)
729 {
730 	int ret = arch_atomic_fetch_dec_relaxed(v);
731 	__atomic_acquire_fence();
732 	return ret;
733 }
734 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
735 #endif
736 
737 #ifndef arch_atomic_fetch_dec_release
738 static __always_inline int
739 arch_atomic_fetch_dec_release(atomic_t *v)
740 {
741 	__atomic_release_fence();
742 	return arch_atomic_fetch_dec_relaxed(v);
743 }
744 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
745 #endif
746 
747 #ifndef arch_atomic_fetch_dec
748 static __always_inline int
749 arch_atomic_fetch_dec(atomic_t *v)
750 {
751 	int ret;
752 	__atomic_pre_full_fence();
753 	ret = arch_atomic_fetch_dec_relaxed(v);
754 	__atomic_post_full_fence();
755 	return ret;
756 }
757 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
758 #endif
759 
760 #endif /* arch_atomic_fetch_dec_relaxed */
761 
762 #ifndef arch_atomic_fetch_and_relaxed
763 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
764 #define arch_atomic_fetch_and_release arch_atomic_fetch_and
765 #define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
766 #else /* arch_atomic_fetch_and_relaxed */
767 
768 #ifndef arch_atomic_fetch_and_acquire
769 static __always_inline int
770 arch_atomic_fetch_and_acquire(int i, atomic_t *v)
771 {
772 	int ret = arch_atomic_fetch_and_relaxed(i, v);
773 	__atomic_acquire_fence();
774 	return ret;
775 }
776 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
777 #endif
778 
779 #ifndef arch_atomic_fetch_and_release
780 static __always_inline int
781 arch_atomic_fetch_and_release(int i, atomic_t *v)
782 {
783 	__atomic_release_fence();
784 	return arch_atomic_fetch_and_relaxed(i, v);
785 }
786 #define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
787 #endif
788 
789 #ifndef arch_atomic_fetch_and
790 static __always_inline int
791 arch_atomic_fetch_and(int i, atomic_t *v)
792 {
793 	int ret;
794 	__atomic_pre_full_fence();
795 	ret = arch_atomic_fetch_and_relaxed(i, v);
796 	__atomic_post_full_fence();
797 	return ret;
798 }
799 #define arch_atomic_fetch_and arch_atomic_fetch_and
800 #endif
801 
802 #endif /* arch_atomic_fetch_and_relaxed */
803 
804 #ifndef arch_atomic_andnot
805 static __always_inline void
806 arch_atomic_andnot(int i, atomic_t *v)
807 {
808 	arch_atomic_and(~i, v);
809 }
810 #define arch_atomic_andnot arch_atomic_andnot
811 #endif
812 
813 #ifndef arch_atomic_fetch_andnot_relaxed
814 #ifdef arch_atomic_fetch_andnot
815 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
816 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
817 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
818 #endif /* arch_atomic_fetch_andnot */
819 
820 #ifndef arch_atomic_fetch_andnot
821 static __always_inline int
822 arch_atomic_fetch_andnot(int i, atomic_t *v)
823 {
824 	return arch_atomic_fetch_and(~i, v);
825 }
826 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
827 #endif
828 
829 #ifndef arch_atomic_fetch_andnot_acquire
830 static __always_inline int
831 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
832 {
833 	return arch_atomic_fetch_and_acquire(~i, v);
834 }
835 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
836 #endif
837 
838 #ifndef arch_atomic_fetch_andnot_release
839 static __always_inline int
840 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
841 {
842 	return arch_atomic_fetch_and_release(~i, v);
843 }
844 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
845 #endif
846 
847 #ifndef arch_atomic_fetch_andnot_relaxed
848 static __always_inline int
849 arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
850 {
851 	return arch_atomic_fetch_and_relaxed(~i, v);
852 }
853 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
854 #endif
855 
856 #else /* arch_atomic_fetch_andnot_relaxed */
857 
858 #ifndef arch_atomic_fetch_andnot_acquire
859 static __always_inline int
860 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
861 {
862 	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
863 	__atomic_acquire_fence();
864 	return ret;
865 }
866 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
867 #endif
868 
869 #ifndef arch_atomic_fetch_andnot_release
870 static __always_inline int
871 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
872 {
873 	__atomic_release_fence();
874 	return arch_atomic_fetch_andnot_relaxed(i, v);
875 }
876 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
877 #endif
878 
879 #ifndef arch_atomic_fetch_andnot
880 static __always_inline int
881 arch_atomic_fetch_andnot(int i, atomic_t *v)
882 {
883 	int ret;
884 	__atomic_pre_full_fence();
885 	ret = arch_atomic_fetch_andnot_relaxed(i, v);
886 	__atomic_post_full_fence();
887 	return ret;
888 }
889 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
890 #endif
891 
892 #endif /* arch_atomic_fetch_andnot_relaxed */
893 
894 #ifndef arch_atomic_fetch_or_relaxed
895 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
896 #define arch_atomic_fetch_or_release arch_atomic_fetch_or
897 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
898 #else /* arch_atomic_fetch_or_relaxed */
899 
900 #ifndef arch_atomic_fetch_or_acquire
901 static __always_inline int
902 arch_atomic_fetch_or_acquire(int i, atomic_t *v)
903 {
904 	int ret = arch_atomic_fetch_or_relaxed(i, v);
905 	__atomic_acquire_fence();
906 	return ret;
907 }
908 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
909 #endif
910 
911 #ifndef arch_atomic_fetch_or_release
912 static __always_inline int
913 arch_atomic_fetch_or_release(int i, atomic_t *v)
914 {
915 	__atomic_release_fence();
916 	return arch_atomic_fetch_or_relaxed(i, v);
917 }
918 #define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
919 #endif
920 
921 #ifndef arch_atomic_fetch_or
922 static __always_inline int
923 arch_atomic_fetch_or(int i, atomic_t *v)
924 {
925 	int ret;
926 	__atomic_pre_full_fence();
927 	ret = arch_atomic_fetch_or_relaxed(i, v);
928 	__atomic_post_full_fence();
929 	return ret;
930 }
931 #define arch_atomic_fetch_or arch_atomic_fetch_or
932 #endif
933 
934 #endif /* arch_atomic_fetch_or_relaxed */
935 
936 #ifndef arch_atomic_fetch_xor_relaxed
937 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
938 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
939 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
940 #else /* arch_atomic_fetch_xor_relaxed */
941 
942 #ifndef arch_atomic_fetch_xor_acquire
943 static __always_inline int
944 arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
945 {
946 	int ret = arch_atomic_fetch_xor_relaxed(i, v);
947 	__atomic_acquire_fence();
948 	return ret;
949 }
950 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
951 #endif
952 
953 #ifndef arch_atomic_fetch_xor_release
954 static __always_inline int
955 arch_atomic_fetch_xor_release(int i, atomic_t *v)
956 {
957 	__atomic_release_fence();
958 	return arch_atomic_fetch_xor_relaxed(i, v);
959 }
960 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
961 #endif
962 
963 #ifndef arch_atomic_fetch_xor
964 static __always_inline int
965 arch_atomic_fetch_xor(int i, atomic_t *v)
966 {
967 	int ret;
968 	__atomic_pre_full_fence();
969 	ret = arch_atomic_fetch_xor_relaxed(i, v);
970 	__atomic_post_full_fence();
971 	return ret;
972 }
973 #define arch_atomic_fetch_xor arch_atomic_fetch_xor
974 #endif
975 
976 #endif /* arch_atomic_fetch_xor_relaxed */
977 
978 #ifndef arch_atomic_xchg_relaxed
979 #define arch_atomic_xchg_acquire arch_atomic_xchg
980 #define arch_atomic_xchg_release arch_atomic_xchg
981 #define arch_atomic_xchg_relaxed arch_atomic_xchg
982 #else /* arch_atomic_xchg_relaxed */
983 
984 #ifndef arch_atomic_xchg_acquire
985 static __always_inline int
986 arch_atomic_xchg_acquire(atomic_t *v, int i)
987 {
988 	int ret = arch_atomic_xchg_relaxed(v, i);
989 	__atomic_acquire_fence();
990 	return ret;
991 }
992 #define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
993 #endif
994 
995 #ifndef arch_atomic_xchg_release
996 static __always_inline int
997 arch_atomic_xchg_release(atomic_t *v, int i)
998 {
999 	__atomic_release_fence();
1000 	return arch_atomic_xchg_relaxed(v, i);
1001 }
1002 #define arch_atomic_xchg_release arch_atomic_xchg_release
1003 #endif
1004 
1005 #ifndef arch_atomic_xchg
1006 static __always_inline int
1007 arch_atomic_xchg(atomic_t *v, int i)
1008 {
1009 	int ret;
1010 	__atomic_pre_full_fence();
1011 	ret = arch_atomic_xchg_relaxed(v, i);
1012 	__atomic_post_full_fence();
1013 	return ret;
1014 }
1015 #define arch_atomic_xchg arch_atomic_xchg
1016 #endif
1017 
1018 #endif /* arch_atomic_xchg_relaxed */
1019 
1020 #ifndef arch_atomic_cmpxchg_relaxed
1021 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
1022 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
1023 #define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
1024 #else /* arch_atomic_cmpxchg_relaxed */
1025 
1026 #ifndef arch_atomic_cmpxchg_acquire
1027 static __always_inline int
1028 arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
1029 {
1030 	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
1031 	__atomic_acquire_fence();
1032 	return ret;
1033 }
1034 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
1035 #endif
1036 
1037 #ifndef arch_atomic_cmpxchg_release
1038 static __always_inline int
1039 arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
1040 {
1041 	__atomic_release_fence();
1042 	return arch_atomic_cmpxchg_relaxed(v, old, new);
1043 }
1044 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
1045 #endif
1046 
1047 #ifndef arch_atomic_cmpxchg
1048 static __always_inline int
1049 arch_atomic_cmpxchg(atomic_t *v, int old, int new)
1050 {
1051 	int ret;
1052 	__atomic_pre_full_fence();
1053 	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
1054 	__atomic_post_full_fence();
1055 	return ret;
1056 }
1057 #define arch_atomic_cmpxchg arch_atomic_cmpxchg
1058 #endif
1059 
1060 #endif /* arch_atomic_cmpxchg_relaxed */
1061 
1062 #ifndef arch_atomic_try_cmpxchg_relaxed
1063 #ifdef arch_atomic_try_cmpxchg
1064 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
1065 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
1066 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
1067 #endif /* arch_atomic_try_cmpxchg */
1068 
1069 #ifndef arch_atomic_try_cmpxchg
1070 static __always_inline bool
1071 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1072 {
1073 	int r, o = *old;
1074 	r = arch_atomic_cmpxchg(v, o, new);
1075 	if (unlikely(r != o))
1076 		*old = r;
1077 	return likely(r == o);
1078 }
1079 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1080 #endif
1081 
1082 #ifndef arch_atomic_try_cmpxchg_acquire
1083 static __always_inline bool
1084 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1085 {
1086 	int r, o = *old;
1087 	r = arch_atomic_cmpxchg_acquire(v, o, new);
1088 	if (unlikely(r != o))
1089 		*old = r;
1090 	return likely(r == o);
1091 }
1092 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1093 #endif
1094 
1095 #ifndef arch_atomic_try_cmpxchg_release
1096 static __always_inline bool
1097 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1098 {
1099 	int r, o = *old;
1100 	r = arch_atomic_cmpxchg_release(v, o, new);
1101 	if (unlikely(r != o))
1102 		*old = r;
1103 	return likely(r == o);
1104 }
1105 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1106 #endif
1107 
1108 #ifndef arch_atomic_try_cmpxchg_relaxed
1109 static __always_inline bool
1110 arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1111 {
1112 	int r, o = *old;
1113 	r = arch_atomic_cmpxchg_relaxed(v, o, new);
1114 	if (unlikely(r != o))
1115 		*old = r;
1116 	return likely(r == o);
1117 }
1118 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
1119 #endif
1120 
1121 #else /* arch_atomic_try_cmpxchg_relaxed */
1122 
1123 #ifndef arch_atomic_try_cmpxchg_acquire
1124 static __always_inline bool
1125 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1126 {
1127 	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1128 	__atomic_acquire_fence();
1129 	return ret;
1130 }
1131 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1132 #endif
1133 
1134 #ifndef arch_atomic_try_cmpxchg_release
1135 static __always_inline bool
1136 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1137 {
1138 	__atomic_release_fence();
1139 	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
1140 }
1141 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1142 #endif
1143 
1144 #ifndef arch_atomic_try_cmpxchg
1145 static __always_inline bool
1146 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1147 {
1148 	bool ret;
1149 	__atomic_pre_full_fence();
1150 	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1151 	__atomic_post_full_fence();
1152 	return ret;
1153 }
1154 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1155 #endif
1156 
1157 #endif /* arch_atomic_try_cmpxchg_relaxed */
1158 
1159 #ifndef arch_atomic_sub_and_test
1160 /**
1161  * arch_atomic_sub_and_test - subtract value from variable and test result
1162  * @i: integer value to subtract
1163  * @v: pointer of type atomic_t
1164  *
1165  * Atomically subtracts @i from @v and returns
1166  * true if the result is zero, or false for all
1167  * other cases.
1168  */
1169 static __always_inline bool
1170 arch_atomic_sub_and_test(int i, atomic_t *v)
1171 {
1172 	return arch_atomic_sub_return(i, v) == 0;
1173 }
1174 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
1175 #endif
1176 
1177 #ifndef arch_atomic_dec_and_test
1178 /**
1179  * arch_atomic_dec_and_test - decrement and test
1180  * @v: pointer of type atomic_t
1181  *
1182  * Atomically decrements @v by 1 and
1183  * returns true if the result is 0, or false for all other
1184  * cases.
1185  */
1186 static __always_inline bool
1187 arch_atomic_dec_and_test(atomic_t *v)
1188 {
1189 	return arch_atomic_dec_return(v) == 0;
1190 }
1191 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
1192 #endif
1193 
1194 #ifndef arch_atomic_inc_and_test
1195 /**
1196  * arch_atomic_inc_and_test - increment and test
1197  * @v: pointer of type atomic_t
1198  *
1199  * Atomically increments @v by 1
1200  * and returns true if the result is zero, or false for all
1201  * other cases.
1202  */
1203 static __always_inline bool
1204 arch_atomic_inc_and_test(atomic_t *v)
1205 {
1206 	return arch_atomic_inc_return(v) == 0;
1207 }
1208 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
1209 #endif
1210 
1211 #ifndef arch_atomic_add_negative_relaxed
1212 #ifdef arch_atomic_add_negative
1213 #define arch_atomic_add_negative_acquire arch_atomic_add_negative
1214 #define arch_atomic_add_negative_release arch_atomic_add_negative
1215 #define arch_atomic_add_negative_relaxed arch_atomic_add_negative
1216 #endif /* arch_atomic_add_negative */
1217 
1218 #ifndef arch_atomic_add_negative
1219 /**
1220  * arch_atomic_add_negative - Add and test if negative
1221  * @i: integer value to add
1222  * @v: pointer of type atomic_t
1223  *
1224  * Atomically adds @i to @v and returns true if the result is negative,
1225  * or false when the result is greater than or equal to zero.
1226  */
1227 static __always_inline bool
1228 arch_atomic_add_negative(int i, atomic_t *v)
1229 {
1230 	return arch_atomic_add_return(i, v) < 0;
1231 }
1232 #define arch_atomic_add_negative arch_atomic_add_negative
1233 #endif
1234 
1235 #ifndef arch_atomic_add_negative_acquire
1236 /**
1237  * arch_atomic_add_negative_acquire - Add and test if negative
1238  * @i: integer value to add
1239  * @v: pointer of type atomic_t
1240  *
1241  * Atomically adds @i to @v and returns true if the result is negative,
1242  * or false when the result is greater than or equal to zero.
1243  */
1244 static __always_inline bool
1245 arch_atomic_add_negative_acquire(int i, atomic_t *v)
1246 {
1247 	return arch_atomic_add_return_acquire(i, v) < 0;
1248 }
1249 #define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
1250 #endif
1251 
1252 #ifndef arch_atomic_add_negative_release
1253 /**
1254  * arch_atomic_add_negative_release - Add and test if negative
1255  * @i: integer value to add
1256  * @v: pointer of type atomic_t
1257  *
1258  * Atomically adds @i to @v and returns true if the result is negative,
1259  * or false when the result is greater than or equal to zero.
1260  */
1261 static __always_inline bool
1262 arch_atomic_add_negative_release(int i, atomic_t *v)
1263 {
1264 	return arch_atomic_add_return_release(i, v) < 0;
1265 }
1266 #define arch_atomic_add_negative_release arch_atomic_add_negative_release
1267 #endif
1268 
1269 #ifndef arch_atomic_add_negative_relaxed
1270 /**
1271  * arch_atomic_add_negative_relaxed - Add and test if negative
1272  * @i: integer value to add
1273  * @v: pointer of type atomic_t
1274  *
1275  * Atomically adds @i to @v and returns true if the result is negative,
1276  * or false when the result is greater than or equal to zero.
1277  */
1278 static __always_inline bool
1279 arch_atomic_add_negative_relaxed(int i, atomic_t *v)
1280 {
1281 	return arch_atomic_add_return_relaxed(i, v) < 0;
1282 }
1283 #define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
1284 #endif
1285 
1286 #else /* arch_atomic_add_negative_relaxed */
1287 
1288 #ifndef arch_atomic_add_negative_acquire
1289 static __always_inline bool
1290 arch_atomic_add_negative_acquire(int i, atomic_t *v)
1291 {
1292 	bool ret = arch_atomic_add_negative_relaxed(i, v);
1293 	__atomic_acquire_fence();
1294 	return ret;
1295 }
1296 #define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
1297 #endif
1298 
1299 #ifndef arch_atomic_add_negative_release
1300 static __always_inline bool
1301 arch_atomic_add_negative_release(int i, atomic_t *v)
1302 {
1303 	__atomic_release_fence();
1304 	return arch_atomic_add_negative_relaxed(i, v);
1305 }
1306 #define arch_atomic_add_negative_release arch_atomic_add_negative_release
1307 #endif
1308 
1309 #ifndef arch_atomic_add_negative
1310 static __always_inline bool
1311 arch_atomic_add_negative(int i, atomic_t *v)
1312 {
1313 	bool ret;
1314 	__atomic_pre_full_fence();
1315 	ret = arch_atomic_add_negative_relaxed(i, v);
1316 	__atomic_post_full_fence();
1317 	return ret;
1318 }
1319 #define arch_atomic_add_negative arch_atomic_add_negative
1320 #endif
1321 
1322 #endif /* arch_atomic_add_negative_relaxed */
1323 
1324 #ifndef arch_atomic_fetch_add_unless
1325 /**
1326  * arch_atomic_fetch_add_unless - add unless the number is already a given value
1327  * @v: pointer of type atomic_t
1328  * @a: the amount to add to v...
1329  * @u: ...unless v is equal to u.
1330  *
1331  * Atomically adds @a to @v, so long as @v was not already @u.
1332  * Returns original value of @v
1333  */
1334 static __always_inline int
1335 arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
1336 {
1337 	int c = arch_atomic_read(v);
1338 
1339 	do {
1340 		if (unlikely(c == u))
1341 			break;
1342 	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));
1343 
1344 	return c;
1345 }
1346 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
1347 #endif
1348 
1349 #ifndef arch_atomic_add_unless
1350 /**
1351  * arch_atomic_add_unless - add unless the number is already a given value
1352  * @v: pointer of type atomic_t
1353  * @a: the amount to add to v...
1354  * @u: ...unless v is equal to u.
1355  *
1356  * Atomically adds @a to @v, if @v was not already @u.
1357  * Returns true if the addition was done.
1358  */
1359 static __always_inline bool
1360 arch_atomic_add_unless(atomic_t *v, int a, int u)
1361 {
1362 	return arch_atomic_fetch_add_unless(v, a, u) != u;
1363 }
1364 #define arch_atomic_add_unless arch_atomic_add_unless
1365 #endif
1366 
1367 #ifndef arch_atomic_inc_not_zero
1368 /**
1369  * arch_atomic_inc_not_zero - increment unless the number is zero
1370  * @v: pointer of type atomic_t
1371  *
1372  * Atomically increments @v by 1, if @v is non-zero.
1373  * Returns true if the increment was done.
1374  */
1375 static __always_inline bool
1376 arch_atomic_inc_not_zero(atomic_t *v)
1377 {
1378 	return arch_atomic_add_unless(v, 1, 0);
1379 }
1380 #define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
1381 #endif
1382 
1383 #ifndef arch_atomic_inc_unless_negative
1384 static __always_inline bool
1385 arch_atomic_inc_unless_negative(atomic_t *v)
1386 {
1387 	int c = arch_atomic_read(v);
1388 
1389 	do {
1390 		if (unlikely(c < 0))
1391 			return false;
1392 	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
1393 
1394 	return true;
1395 }
1396 #define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
1397 #endif
1398 
1399 #ifndef arch_atomic_dec_unless_positive
1400 static __always_inline bool
1401 arch_atomic_dec_unless_positive(atomic_t *v)
1402 {
1403 	int c = arch_atomic_read(v);
1404 
1405 	do {
1406 		if (unlikely(c > 0))
1407 			return false;
1408 	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
1409 
1410 	return true;
1411 }
1412 #define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
1413 #endif
1414 
1415 #ifndef arch_atomic_dec_if_positive
1416 static __always_inline int
1417 arch_atomic_dec_if_positive(atomic_t *v)
1418 {
1419 	int dec, c = arch_atomic_read(v);
1420 
1421 	do {
1422 		dec = c - 1;
1423 		if (unlikely(dec < 0))
1424 			break;
1425 	} while (!arch_atomic_try_cmpxchg(v, &c, dec));
1426 
1427 	return dec;
1428 }
1429 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
1430 #endif
1431 
1432 #ifdef CONFIG_GENERIC_ATOMIC64
1433 #include <asm-generic/atomic64.h>
1434 #endif
1435 
1436 #ifndef arch_atomic64_read_acquire
1437 static __always_inline s64
1438 arch_atomic64_read_acquire(const atomic64_t *v)
1439 {
1440 	s64 ret;
1441 
1442 	if (__native_word(atomic64_t)) {
1443 		ret = smp_load_acquire(&(v)->counter);
1444 	} else {
1445 		ret = arch_atomic64_read(v);
1446 		__atomic_acquire_fence();
1447 	}
1448 
1449 	return ret;
1450 }
1451 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1452 #endif
1453 
1454 #ifndef arch_atomic64_set_release
1455 static __always_inline void
1456 arch_atomic64_set_release(atomic64_t *v, s64 i)
1457 {
1458 	if (__native_word(atomic64_t)) {
1459 		smp_store_release(&(v)->counter, i);
1460 	} else {
1461 		__atomic_release_fence();
1462 		arch_atomic64_set(v, i);
1463 	}
1464 }
1465 #define arch_atomic64_set_release arch_atomic64_set_release
1466 #endif
1467 
1468 #ifndef arch_atomic64_add_return_relaxed
1469 #define arch_atomic64_add_return_acquire arch_atomic64_add_return
1470 #define arch_atomic64_add_return_release arch_atomic64_add_return
1471 #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
1472 #else /* arch_atomic64_add_return_relaxed */
1473 
1474 #ifndef arch_atomic64_add_return_acquire
1475 static __always_inline s64
1476 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
1477 {
1478 	s64 ret = arch_atomic64_add_return_relaxed(i, v);
1479 	__atomic_acquire_fence();
1480 	return ret;
1481 }
1482 #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
1483 #endif
1484 
1485 #ifndef arch_atomic64_add_return_release
1486 static __always_inline s64
1487 arch_atomic64_add_return_release(s64 i, atomic64_t *v)
1488 {
1489 	__atomic_release_fence();
1490 	return arch_atomic64_add_return_relaxed(i, v);
1491 }
1492 #define arch_atomic64_add_return_release arch_atomic64_add_return_release
1493 #endif
1494 
1495 #ifndef arch_atomic64_add_return
1496 static __always_inline s64
1497 arch_atomic64_add_return(s64 i, atomic64_t *v)
1498 {
1499 	s64 ret;
1500 	__atomic_pre_full_fence();
1501 	ret = arch_atomic64_add_return_relaxed(i, v);
1502 	__atomic_post_full_fence();
1503 	return ret;
1504 }
1505 #define arch_atomic64_add_return arch_atomic64_add_return
1506 #endif
1507 
1508 #endif /* arch_atomic64_add_return_relaxed */
1509 
1510 #ifndef arch_atomic64_fetch_add_relaxed
1511 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
1512 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
1513 #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
1514 #else /* arch_atomic64_fetch_add_relaxed */
1515 
1516 #ifndef arch_atomic64_fetch_add_acquire
1517 static __always_inline s64
1518 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1519 {
1520 	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
1521 	__atomic_acquire_fence();
1522 	return ret;
1523 }
1524 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
1525 #endif
1526 
1527 #ifndef arch_atomic64_fetch_add_release
1528 static __always_inline s64
1529 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
1530 {
1531 	__atomic_release_fence();
1532 	return arch_atomic64_fetch_add_relaxed(i, v);
1533 }
1534 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
1535 #endif
1536 
1537 #ifndef arch_atomic64_fetch_add
1538 static __always_inline s64
1539 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
1540 {
1541 	s64 ret;
1542 	__atomic_pre_full_fence();
1543 	ret = arch_atomic64_fetch_add_relaxed(i, v);
1544 	__atomic_post_full_fence();
1545 	return ret;
1546 }
1547 #define arch_atomic64_fetch_add arch_atomic64_fetch_add
1548 #endif
1549 
1550 #endif /* arch_atomic64_fetch_add_relaxed */
1551 
1552 #ifndef arch_atomic64_sub_return_relaxed
1553 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
1554 #define arch_atomic64_sub_return_release arch_atomic64_sub_return
1555 #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
1556 #else /* arch_atomic64_sub_return_relaxed */
1557 
1558 #ifndef arch_atomic64_sub_return_acquire
1559 static __always_inline s64
1560 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1561 {
1562 	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
1563 	__atomic_acquire_fence();
1564 	return ret;
1565 }
1566 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
1567 #endif
1568 
1569 #ifndef arch_atomic64_sub_return_release
1570 static __always_inline s64
1571 arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
1572 {
1573 	__atomic_release_fence();
1574 	return arch_atomic64_sub_return_relaxed(i, v);
1575 }
1576 #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
1577 #endif
1578 
1579 #ifndef arch_atomic64_sub_return
1580 static __always_inline s64
1581 arch_atomic64_sub_return(s64 i, atomic64_t *v)
1582 {
1583 	s64 ret;
1584 	__atomic_pre_full_fence();
1585 	ret = arch_atomic64_sub_return_relaxed(i, v);
1586 	__atomic_post_full_fence();
1587 	return ret;
1588 }
1589 #define arch_atomic64_sub_return arch_atomic64_sub_return
1590 #endif
1591 
1592 #endif /* arch_atomic64_sub_return_relaxed */
1593 
1594 #ifndef arch_atomic64_fetch_sub_relaxed
1595 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
1596 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
1597 #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
1598 #else /* arch_atomic64_fetch_sub_relaxed */
1599 
1600 #ifndef arch_atomic64_fetch_sub_acquire
1601 static __always_inline s64
1602 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1603 {
1604 	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1605 	__atomic_acquire_fence();
1606 	return ret;
1607 }
1608 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
1609 #endif
1610 
1611 #ifndef arch_atomic64_fetch_sub_release
1612 static __always_inline s64
1613 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1614 {
1615 	__atomic_release_fence();
1616 	return arch_atomic64_fetch_sub_relaxed(i, v);
1617 }
1618 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
1619 #endif
1620 
1621 #ifndef arch_atomic64_fetch_sub
1622 static __always_inline s64
1623 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
1624 {
1625 	s64 ret;
1626 	__atomic_pre_full_fence();
1627 	ret = arch_atomic64_fetch_sub_relaxed(i, v);
1628 	__atomic_post_full_fence();
1629 	return ret;
1630 }
1631 #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
1632 #endif
1633 
1634 #endif /* arch_atomic64_fetch_sub_relaxed */
1635 
1636 #ifndef arch_atomic64_inc
1637 static __always_inline void
1638 arch_atomic64_inc(atomic64_t *v)
1639 {
1640 	arch_atomic64_add(1, v);
1641 }
1642 #define arch_atomic64_inc arch_atomic64_inc
1643 #endif
1644 
1645 #ifndef arch_atomic64_inc_return_relaxed
1646 #ifdef arch_atomic64_inc_return
1647 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1648 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1649 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1650 #endif /* arch_atomic64_inc_return */
1651 
1652 #ifndef arch_atomic64_inc_return
1653 static __always_inline s64
1654 arch_atomic64_inc_return(atomic64_t *v)
1655 {
1656 	return arch_atomic64_add_return(1, v);
1657 }
1658 #define arch_atomic64_inc_return arch_atomic64_inc_return
1659 #endif
1660 
1661 #ifndef arch_atomic64_inc_return_acquire
1662 static __always_inline s64
1663 arch_atomic64_inc_return_acquire(atomic64_t *v)
1664 {
1665 	return arch_atomic64_add_return_acquire(1, v);
1666 }
1667 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1668 #endif
1669 
1670 #ifndef arch_atomic64_inc_return_release
1671 static __always_inline s64
1672 arch_atomic64_inc_return_release(atomic64_t *v)
1673 {
1674 	return arch_atomic64_add_return_release(1, v);
1675 }
1676 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1677 #endif
1678 
1679 #ifndef arch_atomic64_inc_return_relaxed
1680 static __always_inline s64
1681 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1682 {
1683 	return arch_atomic64_add_return_relaxed(1, v);
1684 }
1685 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1686 #endif
1687 
1688 #else /* arch_atomic64_inc_return_relaxed */
1689 
1690 #ifndef arch_atomic64_inc_return_acquire
1691 static __always_inline s64
1692 arch_atomic64_inc_return_acquire(atomic64_t *v)
1693 {
1694 	s64 ret = arch_atomic64_inc_return_relaxed(v);
1695 	__atomic_acquire_fence();
1696 	return ret;
1697 }
1698 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1699 #endif
1700 
1701 #ifndef arch_atomic64_inc_return_release
1702 static __always_inline s64
1703 arch_atomic64_inc_return_release(atomic64_t *v)
1704 {
1705 	__atomic_release_fence();
1706 	return arch_atomic64_inc_return_relaxed(v);
1707 }
1708 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1709 #endif
1710 
1711 #ifndef arch_atomic64_inc_return
1712 static __always_inline s64
1713 arch_atomic64_inc_return(atomic64_t *v)
1714 {
1715 	s64 ret;
1716 	__atomic_pre_full_fence();
1717 	ret = arch_atomic64_inc_return_relaxed(v);
1718 	__atomic_post_full_fence();
1719 	return ret;
1720 }
1721 #define arch_atomic64_inc_return arch_atomic64_inc_return
1722 #endif
1723 
1724 #endif /* arch_atomic64_inc_return_relaxed */
1725 
1726 #ifndef arch_atomic64_fetch_inc_relaxed
1727 #ifdef arch_atomic64_fetch_inc
1728 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1729 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1730 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1731 #endif /* arch_atomic64_fetch_inc */
1732 
1733 #ifndef arch_atomic64_fetch_inc
1734 static __always_inline s64
1735 arch_atomic64_fetch_inc(atomic64_t *v)
1736 {
1737 	return arch_atomic64_fetch_add(1, v);
1738 }
1739 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1740 #endif
1741 
1742 #ifndef arch_atomic64_fetch_inc_acquire
1743 static __always_inline s64
1744 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1745 {
1746 	return arch_atomic64_fetch_add_acquire(1, v);
1747 }
1748 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1749 #endif
1750 
1751 #ifndef arch_atomic64_fetch_inc_release
1752 static __always_inline s64
1753 arch_atomic64_fetch_inc_release(atomic64_t *v)
1754 {
1755 	return arch_atomic64_fetch_add_release(1, v);
1756 }
1757 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1758 #endif
1759 
1760 #ifndef arch_atomic64_fetch_inc_relaxed
1761 static __always_inline s64
1762 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1763 {
1764 	return arch_atomic64_fetch_add_relaxed(1, v);
1765 }
1766 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1767 #endif
1768 
1769 #else /* arch_atomic64_fetch_inc_relaxed */
1770 
1771 #ifndef arch_atomic64_fetch_inc_acquire
1772 static __always_inline s64
1773 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1774 {
1775 	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1776 	__atomic_acquire_fence();
1777 	return ret;
1778 }
1779 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1780 #endif
1781 
1782 #ifndef arch_atomic64_fetch_inc_release
1783 static __always_inline s64
1784 arch_atomic64_fetch_inc_release(atomic64_t *v)
1785 {
1786 	__atomic_release_fence();
1787 	return arch_atomic64_fetch_inc_relaxed(v);
1788 }
1789 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1790 #endif
1791 
1792 #ifndef arch_atomic64_fetch_inc
1793 static __always_inline s64
1794 arch_atomic64_fetch_inc(atomic64_t *v)
1795 {
1796 	s64 ret;
1797 	__atomic_pre_full_fence();
1798 	ret = arch_atomic64_fetch_inc_relaxed(v);
1799 	__atomic_post_full_fence();
1800 	return ret;
1801 }
1802 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1803 #endif
1804 
1805 #endif /* arch_atomic64_fetch_inc_relaxed */
1806 
1807 #ifndef arch_atomic64_dec
1808 static __always_inline void
1809 arch_atomic64_dec(atomic64_t *v)
1810 {
1811 	arch_atomic64_sub(1, v);
1812 }
1813 #define arch_atomic64_dec arch_atomic64_dec
1814 #endif
1815 
1816 #ifndef arch_atomic64_dec_return_relaxed
1817 #ifdef arch_atomic64_dec_return
1818 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1819 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1820 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1821 #endif /* arch_atomic64_dec_return */
1822 
1823 #ifndef arch_atomic64_dec_return
1824 static __always_inline s64
1825 arch_atomic64_dec_return(atomic64_t *v)
1826 {
1827 	return arch_atomic64_sub_return(1, v);
1828 }
1829 #define arch_atomic64_dec_return arch_atomic64_dec_return
1830 #endif
1831 
1832 #ifndef arch_atomic64_dec_return_acquire
1833 static __always_inline s64
1834 arch_atomic64_dec_return_acquire(atomic64_t *v)
1835 {
1836 	return arch_atomic64_sub_return_acquire(1, v);
1837 }
1838 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1839 #endif
1840 
1841 #ifndef arch_atomic64_dec_return_release
1842 static __always_inline s64
1843 arch_atomic64_dec_return_release(atomic64_t *v)
1844 {
1845 	return arch_atomic64_sub_return_release(1, v);
1846 }
1847 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1848 #endif
1849 
1850 #ifndef arch_atomic64_dec_return_relaxed
1851 static __always_inline s64
1852 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1853 {
1854 	return arch_atomic64_sub_return_relaxed(1, v);
1855 }
1856 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1857 #endif
1858 
1859 #else /* arch_atomic64_dec_return_relaxed */
1860 
1861 #ifndef arch_atomic64_dec_return_acquire
1862 static __always_inline s64
1863 arch_atomic64_dec_return_acquire(atomic64_t *v)
1864 {
1865 	s64 ret = arch_atomic64_dec_return_relaxed(v);
1866 	__atomic_acquire_fence();
1867 	return ret;
1868 }
1869 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1870 #endif
1871 
1872 #ifndef arch_atomic64_dec_return_release
1873 static __always_inline s64
1874 arch_atomic64_dec_return_release(atomic64_t *v)
1875 {
1876 	__atomic_release_fence();
1877 	return arch_atomic64_dec_return_relaxed(v);
1878 }
1879 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1880 #endif
1881 
1882 #ifndef arch_atomic64_dec_return
1883 static __always_inline s64
1884 arch_atomic64_dec_return(atomic64_t *v)
1885 {
1886 	s64 ret;
1887 	__atomic_pre_full_fence();
1888 	ret = arch_atomic64_dec_return_relaxed(v);
1889 	__atomic_post_full_fence();
1890 	return ret;
1891 }
1892 #define arch_atomic64_dec_return arch_atomic64_dec_return
1893 #endif
1894 
1895 #endif /* arch_atomic64_dec_return_relaxed */
1896 
1897 #ifndef arch_atomic64_fetch_dec_relaxed
1898 #ifdef arch_atomic64_fetch_dec
1899 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1900 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1901 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1902 #endif /* arch_atomic64_fetch_dec */
1903 
1904 #ifndef arch_atomic64_fetch_dec
1905 static __always_inline s64
1906 arch_atomic64_fetch_dec(atomic64_t *v)
1907 {
1908 	return arch_atomic64_fetch_sub(1, v);
1909 }
1910 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1911 #endif
1912 
1913 #ifndef arch_atomic64_fetch_dec_acquire
1914 static __always_inline s64
1915 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1916 {
1917 	return arch_atomic64_fetch_sub_acquire(1, v);
1918 }
1919 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1920 #endif
1921 
1922 #ifndef arch_atomic64_fetch_dec_release
1923 static __always_inline s64
1924 arch_atomic64_fetch_dec_release(atomic64_t *v)
1925 {
1926 	return arch_atomic64_fetch_sub_release(1, v);
1927 }
1928 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1929 #endif
1930 
1931 #ifndef arch_atomic64_fetch_dec_relaxed
1932 static __always_inline s64
1933 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1934 {
1935 	return arch_atomic64_fetch_sub_relaxed(1, v);
1936 }
1937 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1938 #endif
1939 
1940 #else /* arch_atomic64_fetch_dec_relaxed */
1941 
1942 #ifndef arch_atomic64_fetch_dec_acquire
1943 static __always_inline s64
1944 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1945 {
1946 	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1947 	__atomic_acquire_fence();
1948 	return ret;
1949 }
1950 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1951 #endif
1952 
1953 #ifndef arch_atomic64_fetch_dec_release
1954 static __always_inline s64
1955 arch_atomic64_fetch_dec_release(atomic64_t *v)
1956 {
1957 	__atomic_release_fence();
1958 	return arch_atomic64_fetch_dec_relaxed(v);
1959 }
1960 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1961 #endif
1962 
1963 #ifndef arch_atomic64_fetch_dec
1964 static __always_inline s64
1965 arch_atomic64_fetch_dec(atomic64_t *v)
1966 {
1967 	s64 ret;
1968 	__atomic_pre_full_fence();
1969 	ret = arch_atomic64_fetch_dec_relaxed(v);
1970 	__atomic_post_full_fence();
1971 	return ret;
1972 }
1973 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1974 #endif
1975 
1976 #endif /* arch_atomic64_fetch_dec_relaxed */
1977 
1978 #ifndef arch_atomic64_fetch_and_relaxed
1979 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
1980 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
1981 #define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
1982 #else /* arch_atomic64_fetch_and_relaxed */
1983 
1984 #ifndef arch_atomic64_fetch_and_acquire
1985 static __always_inline s64
1986 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1987 {
1988 	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
1989 	__atomic_acquire_fence();
1990 	return ret;
1991 }
1992 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
1993 #endif
1994 
1995 #ifndef arch_atomic64_fetch_and_release
1996 static __always_inline s64
1997 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
1998 {
1999 	__atomic_release_fence();
2000 	return arch_atomic64_fetch_and_relaxed(i, v);
2001 }
2002 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
2003 #endif
2004 
2005 #ifndef arch_atomic64_fetch_and
2006 static __always_inline s64
2007 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
2008 {
2009 	s64 ret;
2010 	__atomic_pre_full_fence();
2011 	ret = arch_atomic64_fetch_and_relaxed(i, v);
2012 	__atomic_post_full_fence();
2013 	return ret;
2014 }
2015 #define arch_atomic64_fetch_and arch_atomic64_fetch_and
2016 #endif
2017 
2018 #endif /* arch_atomic64_fetch_and_relaxed */
2019 
2020 #ifndef arch_atomic64_andnot
2021 static __always_inline void
2022 arch_atomic64_andnot(s64 i, atomic64_t *v)
2023 {
2024 	arch_atomic64_and(~i, v);
2025 }
2026 #define arch_atomic64_andnot arch_atomic64_andnot
2027 #endif
2028 
2029 #ifndef arch_atomic64_fetch_andnot_relaxed
2030 #ifdef arch_atomic64_fetch_andnot
2031 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
2032 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
2033 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
2034 #endif /* arch_atomic64_fetch_andnot */
2035 
2036 #ifndef arch_atomic64_fetch_andnot
2037 static __always_inline s64
2038 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
2039 {
2040 	return arch_atomic64_fetch_and(~i, v);
2041 }
2042 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
2043 #endif
2044 
2045 #ifndef arch_atomic64_fetch_andnot_acquire
2046 static __always_inline s64
2047 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2048 {
2049 	return arch_atomic64_fetch_and_acquire(~i, v);
2050 }
2051 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
2052 #endif
2053 
2054 #ifndef arch_atomic64_fetch_andnot_release
2055 static __always_inline s64
2056 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2057 {
2058 	return arch_atomic64_fetch_and_release(~i, v);
2059 }
2060 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
2061 #endif
2062 
2063 #ifndef arch_atomic64_fetch_andnot_relaxed
2064 static __always_inline s64
2065 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
2066 {
2067 	return arch_atomic64_fetch_and_relaxed(~i, v);
2068 }
2069 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
2070 #endif
2071 
2072 #else /* arch_atomic64_fetch_andnot_relaxed */
2073 
2074 #ifndef arch_atomic64_fetch_andnot_acquire
2075 static __always_inline s64
2076 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
2077 {
2078 	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
2079 	__atomic_acquire_fence();
2080 	return ret;
2081 }
2082 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
2083 #endif
2084 
2085 #ifndef arch_atomic64_fetch_andnot_release
2086 static __always_inline s64
2087 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
2088 {
2089 	__atomic_release_fence();
2090 	return arch_atomic64_fetch_andnot_relaxed(i, v);
2091 }
2092 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
2093 #endif
2094 
2095 #ifndef arch_atomic64_fetch_andnot
2096 static __always_inline s64
2097 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
2098 {
2099 	s64 ret;
2100 	__atomic_pre_full_fence();
2101 	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
2102 	__atomic_post_full_fence();
2103 	return ret;
2104 }
2105 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
2106 #endif
2107 
2108 #endif /* arch_atomic64_fetch_andnot_relaxed */
2109 
2110 #ifndef arch_atomic64_fetch_or_relaxed
2111 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
2112 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
2113 #define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
2114 #else /* arch_atomic64_fetch_or_relaxed */
2115 
2116 #ifndef arch_atomic64_fetch_or_acquire
2117 static __always_inline s64
2118 arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
2119 {
2120 	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
2121 	__atomic_acquire_fence();
2122 	return ret;
2123 }
2124 #define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
2125 #endif
2126 
2127 #ifndef arch_atomic64_fetch_or_release
2128 static __always_inline s64
2129 arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
2130 {
2131 	__atomic_release_fence();
2132 	return arch_atomic64_fetch_or_relaxed(i, v);
2133 }
2134 #define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
2135 #endif
2136 
2137 #ifndef arch_atomic64_fetch_or
2138 static __always_inline s64
2139 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
2140 {
2141 	s64 ret;
2142 	__atomic_pre_full_fence();
2143 	ret = arch_atomic64_fetch_or_relaxed(i, v);
2144 	__atomic_post_full_fence();
2145 	return ret;
2146 }
2147 #define arch_atomic64_fetch_or arch_atomic64_fetch_or
2148 #endif
2149 
2150 #endif /* arch_atomic64_fetch_or_relaxed */
2151 
2152 #ifndef arch_atomic64_fetch_xor_relaxed
2153 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
2154 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
2155 #define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
2156 #else /* arch_atomic64_fetch_xor_relaxed */
2157 
2158 #ifndef arch_atomic64_fetch_xor_acquire
2159 static __always_inline s64
2160 arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
2161 {
2162 	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
2163 	__atomic_acquire_fence();
2164 	return ret;
2165 }
2166 #define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
2167 #endif
2168 
2169 #ifndef arch_atomic64_fetch_xor_release
2170 static __always_inline s64
2171 arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
2172 {
2173 	__atomic_release_fence();
2174 	return arch_atomic64_fetch_xor_relaxed(i, v);
2175 }
2176 #define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
2177 #endif
2178 
2179 #ifndef arch_atomic64_fetch_xor
2180 static __always_inline s64
2181 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
2182 {
2183 	s64 ret;
2184 	__atomic_pre_full_fence();
2185 	ret = arch_atomic64_fetch_xor_relaxed(i, v);
2186 	__atomic_post_full_fence();
2187 	return ret;
2188 }
2189 #define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
2190 #endif
2191 
2192 #endif /* arch_atomic64_fetch_xor_relaxed */
2193 
2194 #ifndef arch_atomic64_xchg_relaxed
2195 #define arch_atomic64_xchg_acquire arch_atomic64_xchg
2196 #define arch_atomic64_xchg_release arch_atomic64_xchg
2197 #define arch_atomic64_xchg_relaxed arch_atomic64_xchg
2198 #else /* arch_atomic64_xchg_relaxed */
2199 
2200 #ifndef arch_atomic64_xchg_acquire
2201 static __always_inline s64
2202 arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
2203 {
2204 	s64 ret = arch_atomic64_xchg_relaxed(v, i);
2205 	__atomic_acquire_fence();
2206 	return ret;
2207 }
2208 #define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
2209 #endif
2210 
2211 #ifndef arch_atomic64_xchg_release
2212 static __always_inline s64
2213 arch_atomic64_xchg_release(atomic64_t *v, s64 i)
2214 {
2215 	__atomic_release_fence();
2216 	return arch_atomic64_xchg_relaxed(v, i);
2217 }
2218 #define arch_atomic64_xchg_release arch_atomic64_xchg_release
2219 #endif
2220 
2221 #ifndef arch_atomic64_xchg
2222 static __always_inline s64
2223 arch_atomic64_xchg(atomic64_t *v, s64 i)
2224 {
2225 	s64 ret;
2226 	__atomic_pre_full_fence();
2227 	ret = arch_atomic64_xchg_relaxed(v, i);
2228 	__atomic_post_full_fence();
2229 	return ret;
2230 }
2231 #define arch_atomic64_xchg arch_atomic64_xchg
2232 #endif
2233 
2234 #endif /* arch_atomic64_xchg_relaxed */
2235 
2236 #ifndef arch_atomic64_cmpxchg_relaxed
2237 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
2238 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
2239 #define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
2240 #else /* arch_atomic64_cmpxchg_relaxed */
2241 
2242 #ifndef arch_atomic64_cmpxchg_acquire
2243 static __always_inline s64
2244 arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
2245 {
2246 	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2247 	__atomic_acquire_fence();
2248 	return ret;
2249 }
2250 #define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
2251 #endif
2252 
2253 #ifndef arch_atomic64_cmpxchg_release
2254 static __always_inline s64
2255 arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
2256 {
2257 	__atomic_release_fence();
2258 	return arch_atomic64_cmpxchg_relaxed(v, old, new);
2259 }
2260 #define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
2261 #endif
2262 
2263 #ifndef arch_atomic64_cmpxchg
2264 static __always_inline s64
2265 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
2266 {
2267 	s64 ret;
2268 	__atomic_pre_full_fence();
2269 	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
2270 	__atomic_post_full_fence();
2271 	return ret;
2272 }
2273 #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
2274 #endif
2275 
2276 #endif /* arch_atomic64_cmpxchg_relaxed */
2277 
2278 #ifndef arch_atomic64_try_cmpxchg_relaxed
2279 #ifdef arch_atomic64_try_cmpxchg
2280 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
2281 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
2282 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
2283 #endif /* arch_atomic64_try_cmpxchg */
2284 
2285 #ifndef arch_atomic64_try_cmpxchg
2286 static __always_inline bool
2287 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2288 {
2289 	s64 r, o = *old;
2290 	r = arch_atomic64_cmpxchg(v, o, new);
2291 	if (unlikely(r != o))
2292 		*old = r;
2293 	return likely(r == o);
2294 }
2295 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2296 #endif
2297 
2298 #ifndef arch_atomic64_try_cmpxchg_acquire
2299 static __always_inline bool
2300 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2301 {
2302 	s64 r, o = *old;
2303 	r = arch_atomic64_cmpxchg_acquire(v, o, new);
2304 	if (unlikely(r != o))
2305 		*old = r;
2306 	return likely(r == o);
2307 }
2308 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2309 #endif
2310 
2311 #ifndef arch_atomic64_try_cmpxchg_release
2312 static __always_inline bool
2313 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2314 {
2315 	s64 r, o = *old;
2316 	r = arch_atomic64_cmpxchg_release(v, o, new);
2317 	if (unlikely(r != o))
2318 		*old = r;
2319 	return likely(r == o);
2320 }
2321 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2322 #endif
2323 
2324 #ifndef arch_atomic64_try_cmpxchg_relaxed
2325 static __always_inline bool
2326 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2327 {
2328 	s64 r, o = *old;
2329 	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
2330 	if (unlikely(r != o))
2331 		*old = r;
2332 	return likely(r == o);
2333 }
2334 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
2335 #endif
2336 
2337 #else /* arch_atomic64_try_cmpxchg_relaxed */
2338 
2339 #ifndef arch_atomic64_try_cmpxchg_acquire
2340 static __always_inline bool
2341 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2342 {
2343 	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2344 	__atomic_acquire_fence();
2345 	return ret;
2346 }
2347 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2348 #endif
2349 
2350 #ifndef arch_atomic64_try_cmpxchg_release
2351 static __always_inline bool
2352 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2353 {
2354 	__atomic_release_fence();
2355 	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2356 }
2357 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2358 #endif
2359 
2360 #ifndef arch_atomic64_try_cmpxchg
2361 static __always_inline bool
2362 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2363 {
2364 	bool ret;
2365 	__atomic_pre_full_fence();
2366 	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2367 	__atomic_post_full_fence();
2368 	return ret;
2369 }
2370 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2371 #endif
2372 
2373 #endif /* arch_atomic64_try_cmpxchg_relaxed */
2374 
2375 #ifndef arch_atomic64_sub_and_test
2376 /**
2377  * arch_atomic64_sub_and_test - subtract value from variable and test result
2378  * @i: integer value to subtract
2379  * @v: pointer of type atomic64_t
2380  *
2381  * Atomically subtracts @i from @v and returns
2382  * true if the result is zero, or false for all
2383  * other cases.
2384  */
2385 static __always_inline bool
2386 arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
2387 {
2388 	return arch_atomic64_sub_return(i, v) == 0;
2389 }
2390 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
2391 #endif
2392 
2393 #ifndef arch_atomic64_dec_and_test
2394 /**
2395  * arch_atomic64_dec_and_test - decrement and test
2396  * @v: pointer of type atomic64_t
2397  *
2398  * Atomically decrements @v by 1 and
2399  * returns true if the result is 0, or false for all other
2400  * cases.
2401  */
2402 static __always_inline bool
2403 arch_atomic64_dec_and_test(atomic64_t *v)
2404 {
2405 	return arch_atomic64_dec_return(v) == 0;
2406 }
2407 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
2408 #endif
2409 
2410 #ifndef arch_atomic64_inc_and_test
2411 /**
2412  * arch_atomic64_inc_and_test - increment and test
2413  * @v: pointer of type atomic64_t
2414  *
2415  * Atomically increments @v by 1
2416  * and returns true if the result is zero, or false for all
2417  * other cases.
2418  */
2419 static __always_inline bool
2420 arch_atomic64_inc_and_test(atomic64_t *v)
2421 {
2422 	return arch_atomic64_inc_return(v) == 0;
2423 }
2424 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
2425 #endif
2426 
2427 #ifndef arch_atomic64_add_negative_relaxed
2428 #ifdef arch_atomic64_add_negative
2429 #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
2430 #define arch_atomic64_add_negative_release arch_atomic64_add_negative
2431 #define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
2432 #endif /* arch_atomic64_add_negative */
2433 
2434 #ifndef arch_atomic64_add_negative
2435 /**
2436  * arch_atomic64_add_negative - Add and test if negative
2437  * @i: integer value to add
2438  * @v: pointer of type atomic64_t
2439  *
2440  * Atomically adds @i to @v and returns true if the result is negative,
2441  * or false when the result is greater than or equal to zero.
2442  */
2443 static __always_inline bool
2444 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2445 {
2446 	return arch_atomic64_add_return(i, v) < 0;
2447 }
2448 #define arch_atomic64_add_negative arch_atomic64_add_negative
2449 #endif
2450 
2451 #ifndef arch_atomic64_add_negative_acquire
2452 /**
2453  * arch_atomic64_add_negative_acquire - Add and test if negative
2454  * @i: integer value to add
2455  * @v: pointer of type atomic64_t
2456  *
2457  * Atomically adds @i to @v and returns true if the result is negative,
2458  * or false when the result is greater than or equal to zero.
2459  */
2460 static __always_inline bool
2461 arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
2462 {
2463 	return arch_atomic64_add_return_acquire(i, v) < 0;
2464 }
2465 #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
2466 #endif
2467 
2468 #ifndef arch_atomic64_add_negative_release
2469 /**
2470  * arch_atomic64_add_negative_release - Add and test if negative
2471  * @i: integer value to add
2472  * @v: pointer of type atomic64_t
2473  *
2474  * Atomically adds @i to @v and returns true if the result is negative,
2475  * or false when the result is greater than or equal to zero.
2476  */
2477 static __always_inline bool
2478 arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
2479 {
2480 	return arch_atomic64_add_return_release(i, v) < 0;
2481 }
2482 #define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
2483 #endif
2484 
2485 #ifndef arch_atomic64_add_negative_relaxed
2486 /**
2487  * arch_atomic64_add_negative_relaxed - Add and test if negative
2488  * @i: integer value to add
2489  * @v: pointer of type atomic64_t
2490  *
2491  * Atomically adds @i to @v and returns true if the result is negative,
2492  * or false when the result is greater than or equal to zero.
2493  */
2494 static __always_inline bool
2495 arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
2496 {
2497 	return arch_atomic64_add_return_relaxed(i, v) < 0;
2498 }
2499 #define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
2500 #endif
2501 
2502 #else /* arch_atomic64_add_negative_relaxed */
2503 
2504 #ifndef arch_atomic64_add_negative_acquire
2505 static __always_inline bool
2506 arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
2507 {
2508 	bool ret = arch_atomic64_add_negative_relaxed(i, v);
2509 	__atomic_acquire_fence();
2510 	return ret;
2511 }
2512 #define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
2513 #endif
2514 
2515 #ifndef arch_atomic64_add_negative_release
2516 static __always_inline bool
2517 arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
2518 {
2519 	__atomic_release_fence();
2520 	return arch_atomic64_add_negative_relaxed(i, v);
2521 }
2522 #define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
2523 #endif
2524 
2525 #ifndef arch_atomic64_add_negative
2526 static __always_inline bool
2527 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2528 {
2529 	bool ret;
2530 	__atomic_pre_full_fence();
2531 	ret = arch_atomic64_add_negative_relaxed(i, v);
2532 	__atomic_post_full_fence();
2533 	return ret;
2534 }
2535 #define arch_atomic64_add_negative arch_atomic64_add_negative
2536 #endif
2537 
2538 #endif /* arch_atomic64_add_negative_relaxed */
2539 
2540 #ifndef arch_atomic64_fetch_add_unless
2541 /**
2542  * arch_atomic64_fetch_add_unless - add unless the number is already a given value
2543  * @v: pointer of type atomic64_t
2544  * @a: the amount to add to v...
2545  * @u: ...unless v is equal to u.
2546  *
2547  * Atomically adds @a to @v, so long as @v was not already @u.
2548  * Returns original value of @v
2549  */
2550 static __always_inline s64
2551 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2552 {
2553 	s64 c = arch_atomic64_read(v);
2554 
2555 	do {
2556 		if (unlikely(c == u))
2557 			break;
2558 	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
2559 
2560 	return c;
2561 }
2562 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
2563 #endif
2564 
2565 #ifndef arch_atomic64_add_unless
2566 /**
2567  * arch_atomic64_add_unless - add unless the number is already a given value
2568  * @v: pointer of type atomic64_t
2569  * @a: the amount to add to v...
2570  * @u: ...unless v is equal to u.
2571  *
2572  * Atomically adds @a to @v, if @v was not already @u.
2573  * Returns true if the addition was done.
2574  */
2575 static __always_inline bool
2576 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2577 {
2578 	return arch_atomic64_fetch_add_unless(v, a, u) != u;
2579 }
2580 #define arch_atomic64_add_unless arch_atomic64_add_unless
2581 #endif
2582 
2583 #ifndef arch_atomic64_inc_not_zero
2584 /**
2585  * arch_atomic64_inc_not_zero - increment unless the number is zero
2586  * @v: pointer of type atomic64_t
2587  *
2588  * Atomically increments @v by 1, if @v is non-zero.
2589  * Returns true if the increment was done.
2590  */
2591 static __always_inline bool
2592 arch_atomic64_inc_not_zero(atomic64_t *v)
2593 {
2594 	return arch_atomic64_add_unless(v, 1, 0);
2595 }
2596 #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
2597 #endif
2598 
2599 #ifndef arch_atomic64_inc_unless_negative
2600 static __always_inline bool
2601 arch_atomic64_inc_unless_negative(atomic64_t *v)
2602 {
2603 	s64 c = arch_atomic64_read(v);
2604 
2605 	do {
2606 		if (unlikely(c < 0))
2607 			return false;
2608 	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
2609 
2610 	return true;
2611 }
2612 #define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
2613 #endif
2614 
2615 #ifndef arch_atomic64_dec_unless_positive
2616 static __always_inline bool
2617 arch_atomic64_dec_unless_positive(atomic64_t *v)
2618 {
2619 	s64 c = arch_atomic64_read(v);
2620 
2621 	do {
2622 		if (unlikely(c > 0))
2623 			return false;
2624 	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));
2625 
2626 	return true;
2627 }
2628 #define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
2629 #endif
2630 
2631 #ifndef arch_atomic64_dec_if_positive
2632 static __always_inline s64
2633 arch_atomic64_dec_if_positive(atomic64_t *v)
2634 {
2635 	s64 dec, c = arch_atomic64_read(v);
2636 
2637 	do {
2638 		dec = c - 1;
2639 		if (unlikely(dec < 0))
2640 			break;
2641 	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));
2642 
2643 	return dec;
2644 }
2645 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
2646 #endif
2647 
2648 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2649 // 00071fffa021cec66f6290d706d69c91df87bade
2650