/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
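
/*
 * Illustrative sketch (hypothetical names, not a kernel API): a producer
 * that publishes data behind a flag wants the _release/_acquire pair so
 * that stores made before setting the flag are visible to a consumer that
 * observes it:
 *
 *	// producer			// consumer
 *	data = compute();		if (atomic_read_acquire(&ready))
 *	atomic_set_release(&ready, 1);		use(data);
 *
 * A plain statistics counter that needs no ordering at all can use a
 * _relaxed form instead, e.g. atomic_inc_return_relaxed(&nr_events).
 */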

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an arch has a special barrier for acquire/release, it can instead
 * implement its own __atomic_op_*() helpers and reuse the same framework
 * for building the variants.  (An illustrative expansion follows the
 * wrapper macros below.)
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
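
/*
 * For illustration only: on an architecture that supplies just
 * atomic_add_return_relaxed(), the wrappers above make
 *
 *	atomic_add_return_acquire(i, v)
 *
 * behave roughly like
 *
 *	({ typeof(atomic_add_return_relaxed(i, v)) __ret;
 *	   __ret = atomic_add_return_relaxed(i, v);
 *	   smp_mb__after_atomic();
 *	   __ret; })
 *
 * and the fully ordered atomic_add_return(i, v) gains both
 * smp_mb__before_atomic() and smp_mb__after_atomic().  Architectures whose
 * "relaxed" primitives are in fact fully ordered simply alias the other
 * variants to the same function, so no extra barriers are emitted.
 */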

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)					\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)						\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

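/*
 * atomic_try_cmpxchg() and the ordering variants below are boolean-return
 * forms of cmpxchg(): they return true when the exchange succeeded, and on
 * failure they write the value actually observed back through the old-value
 * pointer so the caller can retry without an extra atomic_read().  This is
 * the shape used by the do { ... } while (!atomic_try_cmpxchg(...)) loops
 * further down, e.g. in atomic_fetch_add_unless().
 */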
#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif

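/*
 * Illustrative sketch with hypothetical names: atomic_inc_not_zero() is the
 * usual building block for "take a reference only if the object is still
 * live" lookups, e.g.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	// refcount already hit zero, object is dying
 *	return obj;
 */
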
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return true;
	}
	return false;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif

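/*
 * Busy-wait helpers: atomic_cond_read_acquire()/_relaxed() spin, via
 * smp_cond_load_acquire()/smp_cond_load_relaxed(), until the condition
 * expression @c (which may refer to the freshly loaded counter value as VAL)
 * evaluates true, and return the value that satisfied it.
 */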
#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)					\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)						\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

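/*
 * Same boolean-return semantics as atomic_try_cmpxchg() above, for the
 * 64-bit atomics: on failure the observed value is written back through
 * the old-value pointer.
 */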
#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */