/* SPDX-License-Identifier: GPL-2.0 */
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <linux/types.h>

#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
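
/*
 * For illustration, consider a trylock built on atomic_cmpxchg_acquire():
 *
 *	locked = atomic_cmpxchg_acquire(&lock, 0, 1) == 0;
 *
 * When the cmpxchg succeeds, the ACQUIRE ordering prevents the critical
 * section from being reordered before the lock acquisition.  When it
 * fails, no ordering is implied; code that also needs ordering on the
 * failure path must use the fully ordered atomic_cmpxchg() or add an
 * explicit barrier such as smp_mb().
 */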

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

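/*
 * Illustrative sketch (hypothetical example_* helpers, not part of the
 * kernel API): one-way message passing with the two macros above.  The
 * writer publishes a payload and then sets a flag with RELEASE semantics;
 * a reader that observes the flag with ACQUIRE semantics is guaranteed to
 * also observe the payload.
 */
static inline void example_publish(atomic_t *payload, atomic_t *ready)
{
	atomic_set(payload, 42);	/* plain store of the data */
	atomic_set_release(ready, 1);	/* order the payload before the flag */
}

static inline bool example_consume(atomic_t *payload, atomic_t *ready, int *val)
{
	if (!atomic_read_acquire(ready))	/* flag load ordered before the payload load */
		return false;
	*val = atomic_read(payload);
	return true;
}
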
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an architecture has a special barrier for acquire/release,
 * it can implement its own __atomic_op_*() and use the same framework to
 * build the variants.
 *
 * If an architecture overrides __atomic_op_acquire(), it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

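/*
 * For illustration: on an architecture that provides only
 * atomic_add_return_relaxed(), the blocks below use the helpers above so
 * that atomic_add_return_acquire(i, v) expands roughly to
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * while the fully ordered atomic_add_return(i, v) brackets the relaxed op
 * with smp_mb__before_atomic() and smp_mb__after_atomic().
 */
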
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
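
/*
 * A hypothetical example of how an architecture plugs into the pattern
 * above: its <asm/atomic.h> can provide just the fully ordered operation,
 * e.g.
 *
 *	#define atomic_add_return(i, v)	arch_add_return_mb(i, v)
 *
 * in which case the _relaxed/_acquire/_release names all alias it, or just
 * the relaxed operation, e.g.
 *
 *	#define atomic_add_return_relaxed(i, v)	arch_add_return(i, v)
 *
 * in which case the ordered forms are built from it with the barriers
 * above.  The arch_* names are made up for illustration.
 */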

#ifndef atomic_inc
#define atomic_inc(v)			atomic_add(1, (v))
#endif

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed

#ifndef atomic_inc_return
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic_inc_return_relaxed(v)	atomic_add_return_relaxed(1, (v))
#define atomic_inc_return_acquire(v)	atomic_add_return_acquire(1, (v))
#define atomic_inc_return_release(v)	atomic_add_return_release(1, (v))
#else /* atomic_inc_return */
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return
#endif /* atomic_inc_return */

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

#ifndef atomic_dec
#define atomic_dec(v)			atomic_sub(1, (v))
#endif

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed

#ifndef atomic_dec_return
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_dec_return_relaxed(v)	atomic_sub_return_relaxed(1, (v))
#define atomic_dec_return_acquire(v)	atomic_sub_return_acquire(1, (v))
#define atomic_dec_return_release(v)	atomic_sub_return_release(1, (v))
#else /* atomic_dec_return */
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return
#endif /* atomic_dec_return */

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)	        atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)	        atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifndef atomic_andnot
#define atomic_andnot(i, v)		atomic_and(~(int)(i), (v))
#endif

#ifndef atomic_fetch_andnot_relaxed

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(i, v)		atomic_fetch_and(~(int)(i), (v))
#define atomic_fetch_andnot_relaxed(i, v)	atomic_fetch_and_relaxed(~(int)(i), (v))
#define atomic_fetch_andnot_acquire(i, v)	atomic_fetch_and_acquire(~(int)(i), (v))
#define atomic_fetch_andnot_release(i, v)	atomic_fetch_and_release(~(int)(i), (v))
#else /* atomic_fetch_andnot */
#define atomic_fetch_andnot_relaxed		atomic_fetch_andnot
#define atomic_fetch_andnot_acquire		atomic_fetch_andnot
#define atomic_fetch_andnot_release		atomic_fetch_andnot
#endif /* atomic_fetch_andnot */

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)					\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)						\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
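
/*
 * Illustrative sketch (hypothetical example_* helper, not part of the
 * kernel API): a typical atomic_try_cmpxchg() update loop.  The expected
 * old value lives in a local variable which atomic_try_cmpxchg() refreshes
 * on failure, so the loop never needs to re-read the counter itself.
 */
static inline bool example_inc_below(atomic_t *v, int limit)
{
	int c = atomic_read(v);

	do {
		if (c >= limit)
			return false;	/* already at the limit, give up */
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;			/* incremented from c to c + 1 */
}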

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
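
/*
 * Illustrative sketch (hypothetical example_* helper, not part of the
 * kernel API, and assuming READ_ONCE() from <linux/compiler.h> is visible
 * here): unlike the atomic_*() operations, xchg()/cmpxchg() and their
 * _relaxed/_acquire/_release variants act on plain scalar variables.
 * A classic cmpxchg() retry loop, here recording a maximum value:
 */
static inline void example_track_max(unsigned long *max, unsigned long val)
{
	unsigned long old = READ_ONCE(*max);

	while (val > old) {
		unsigned long prev = cmpxchg(max, old, val);

		if (prev == old)
			break;		/* we installed val */
		old = prev;		/* lost a race, re-check against the new value */
	}
}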

/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic_fetch_add_unless
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
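
/*
 * Illustrative sketch (hypothetical example_obj type, not part of the
 * kernel API): the usual lookup pattern built on atomic_inc_not_zero(),
 * where a reference is taken only if the object is not already on its way
 * to being freed (count has reached zero).
 */
struct example_obj {
	atomic_t refs;
};

static inline bool example_obj_get(struct example_obj *obj)
{
	return atomic_inc_not_zero(&obj->refs);	/* false: object is dying */
}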

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_inc_and_test
static inline bool atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#endif

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic_dec_and_test
static inline bool atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#endif
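
/*
 * Illustrative sketch (hypothetical helper, continuing the example_obj
 * sketch above): the matching "put" side.  Only the caller that takes the
 * count to zero sees atomic_dec_and_test() return true, so exactly one
 * path performs the cleanup.
 */
static inline void example_obj_put(struct example_obj *obj,
				   void (*release)(struct example_obj *))
{
	if (atomic_dec_and_test(&obj->refs))
		release(obj);		/* the last reference has been dropped */
}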

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic_sub_and_test
static inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#endif

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic_add_negative
static inline bool atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline bool atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic_dec_unless_positive
static inline bool atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif
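
/*
 * Illustrative sketch (hypothetical example_* helper, not part of the
 * kernel API): atomic_dec_if_positive() as a "try to take one item"
 * operation on a counted resource.  A negative return value means the
 * count was already zero and nothing was taken.
 */
static inline bool example_take_slot(atomic_t *free_slots)
{
	return atomic_dec_if_positive(free_slots) >= 0;
}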

#define atomic_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))
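
/*
 * Illustrative sketch (hypothetical example_* helper, not part of the
 * kernel API): atomic_cond_read_acquire() spins via smp_cond_load_acquire()
 * until the condition on the re-read value "VAL" becomes true, and the
 * final read has ACQUIRE semantics.  Here: wait for another CPU to set a
 * flag before touching the data it publishes.
 */
static inline void example_wait_for_flag(atomic_t *flag)
{
	atomic_cond_read_acquire(flag, VAL != 0);
}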

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

#ifndef atomic64_inc
#define atomic64_inc(v)			atomic64_add(1, (v))
#endif

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed

#ifndef atomic64_inc_return
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define atomic64_inc_return_relaxed(v)	atomic64_add_return_relaxed(1, (v))
#define atomic64_inc_return_acquire(v)	atomic64_add_return_acquire(1, (v))
#define atomic64_inc_return_release(v)	atomic64_add_return_release(1, (v))
#else /* atomic64_inc_return */
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return
#endif /* atomic64_inc_return */

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

#ifndef atomic64_dec
#define atomic64_dec(v)			atomic64_sub(1, (v))
#endif

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed

#ifndef atomic64_dec_return
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_dec_return_relaxed(v)	atomic64_sub_return_relaxed(1, (v))
#define atomic64_dec_return_acquire(v)	atomic64_sub_return_acquire(1, (v))
#define atomic64_dec_return_release(v)	atomic64_sub_return_release(1, (v))
#else /* atomic64_dec_return */
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return
#endif /* atomic64_dec_return */

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifndef atomic64_andnot
#define atomic64_andnot(i, v)		atomic64_and(~(long long)(i), (v))
#endif

#ifndef atomic64_fetch_andnot_relaxed

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(i, v)		atomic64_fetch_and(~(long long)(i), (v))
#define atomic64_fetch_andnot_relaxed(i, v)	atomic64_fetch_and_relaxed(~(long long)(i), (v))
#define atomic64_fetch_andnot_acquire(i, v)	atomic64_fetch_and_acquire(~(long long)(i), (v))
#define atomic64_fetch_andnot_release(i, v)	atomic64_fetch_and_release(~(long long)(i), (v))
#else /* atomic64_fetch_andnot */
#define atomic64_fetch_andnot_relaxed		atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire		atomic64_fetch_andnot
#define atomic64_fetch_andnot_release		atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)					\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)						\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns the original value of @v.
 */
#ifndef atomic64_fetch_add_unless
static inline long long atomic64_fetch_add_unless(atomic64_t *v, long long a,
						  long long u)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#endif

/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static inline bool atomic64_add_unless(atomic64_t *v, long long a, long long u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
#ifndef atomic64_inc_not_zero
#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
#endif

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_inc_and_test
static inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#endif

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#ifndef atomic64_dec_and_test
static inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#endif

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#ifndef atomic64_sub_and_test
static inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#endif

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#ifndef atomic64_add_negative
static inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#endif

#ifndef atomic64_inc_unless_negative
static inline bool atomic64_inc_unless_negative(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#endif

#ifndef atomic64_dec_unless_positive
static inline bool atomic64_dec_unless_positive(atomic64_t *v)
{
	long long c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#endif

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic64 variable, v, was not decremented.
 */
#ifndef atomic64_dec_if_positive
static inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	long long dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#endif

#define atomic64_cond_read_relaxed(v, c)	smp_cond_load_relaxed(&(v)->counter, (c))
#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */