/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
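
/*
 * Illustrative sketch (the 'ready' atomic_t and the 'payload', compute() and
 * consume() names are hypothetical, not part of this API) of the _release /
 * _acquire variants used for message passing:
 *
 *	producer:
 *		WRITE_ONCE(payload, compute());
 *		atomic_set_release(&ready, 1);
 *
 *	consumer:
 *		if (atomic_read_acquire(&ready))
 *			consume(READ_ONCE(payload));
 *
 * The RELEASE store orders the payload write before the flag update, and the
 * ACQUIRE load orders the flag check before the payload read, so a consumer
 * that observes ready == 1 also observes the payload.
 */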

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * If an architecture has a special barrier for acquire/release, it can
 * implement its own __atomic_op_*() helpers and still use this framework
 * to build the variants; an illustrative expansion follows the helper
 * definitions below.
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
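
/*
 * Illustrative expansion, assuming an architecture that provides only
 * atomic_add_return_relaxed(): the generic atomic_add_return_acquire()
 * defined below then expands roughly to
 *
 *	({
 *		int __ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * i.e. the relaxed RMW followed by a barrier that upgrades it to ACQUIRE.
 * Architectures whose atomics are fully ordered leave the _relaxed names
 * undefined, in which case the _acquire/_release names simply alias the
 * fully ordered operation and no extra barrier is emitted.
 */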

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */
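
/*
 * Usage sketch, with a hypothetical 'nr_events' counter and 'seq' sequence
 * number:
 *
 *	seq = atomic_add_return(1, &nr_events);
 *
 * is fully ordered and is suitable when the new value is used to publish or
 * synchronise other data, while
 *
 *	seq = atomic_add_return_relaxed(1, &nr_events);
 *
 * performs the same atomic RMW but implies no ordering against surrounding
 * accesses, which is enough for pure statistics.
 */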

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */
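
/*
 * Sketch of the fetch_* return convention, using a hypothetical 'next_id'
 * counter: atomic_fetch_inc() returns the value observed *before* the
 * increment, so
 *
 *	id = atomic_fetch_inc(&next_id);
 *
 * hands out unique ids starting from the counter's initial value, whereas
 * atomic_inc_return(&next_id) would return the post-increment value.
 */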

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
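
/*
 * Illustrative try_cmpxchg() loop, sketching a saturating increment of a
 * hypothetical 'refs' counter. The expected value only needs to be loaded
 * once, because a failing atomic_try_cmpxchg() writes the value it actually
 * observed back into 'old':
 *
 *	int old = atomic_read(&refs);
 *
 *	do {
 *		if (old == INT_MAX)
 *			return false;
 *	} while (!atomic_try_cmpxchg(&refs, &old, old + 1));
 *	return true;
 *
 * The equivalent atomic_cmpxchg() loop would have to compare the return
 * value against the expected one and copy it over by hand on every failure.
 */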

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
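
/*
 * Typical (sketched) use of atomic_inc_not_zero(): taking a reference on an
 * object found under RCU, where a refcount of zero means the object is
 * already being torn down. The lookup_object() call and the 'refs' field are
 * hypothetical:
 *
 *	rcu_read_lock();
 *	obj = lookup_object(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refs))
 *		obj = NULL;
 *	rcu_read_unlock();
 *
 * A failed atomic_inc_not_zero() means the final put already ran, so the
 * lookup must be treated as a miss.
 */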

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif
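
/*
 * Sketched use of the andnot fallbacks above, clearing a set of flag bits in
 * a hypothetical 'flags' atomic and acting on the bits that were previously
 * set (FLAG_PENDING, FLAG_URGENT and handle_pending() are made-up names):
 *
 *	old = atomic_fetch_andnot(FLAG_PENDING | FLAG_URGENT, &flags);
 *	if (old & FLAG_PENDING)
 *		handle_pending();
 *
 * On architectures without a native andnot this is, per the fallbacks above,
 * literally atomic_fetch_and(~mask, &flags).
 */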

/**
 * atomic_inc_not_zero_hint - increment unless the value is zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() takes a hint of the probable
 * value of the atomic. This lets the processor avoid reading the memory
 * location before the atomic read/modify/write cycle, lowering the
 * number of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity check, optimized away by the compiler when hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif
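
/*
 * Sketched use of the hint, for an object whose refcount is almost always 1
 * at lookup time (the 'entry' object is hypothetical): passing the expected
 * value lets the first cmpxchg attempt proceed without a prior read.
 *
 *	if (!atomic_inc_not_zero_hint(&entry->refs, 1))
 *		entry = NULL;
 */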

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif
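
/*
 * Sketch: atomic_inc_unless_negative() suits counters that are parked at a
 * negative value to mean "closed/dying" (the 'usage' field is hypothetical):
 *
 *	if (!atomic_inc_unless_negative(&obj->usage))
 *		return -EBUSY;
 *
 * The increment is refused once 'usage' has gone negative;
 * atomic_dec_unless_positive() is the mirror image for counters parked at a
 * non-positive value.
 */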

/*
 * atomic_dec_if_positive - decrement by 1 if the old value is positive
 * @v: pointer of type atomic_t
 *
 * Returns the old value of *v minus 1, even if @v was not actually
 * decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
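
/*
 * Sketched use of atomic_dec_if_positive() as a non-blocking "try down" on a
 * hypothetical count of available resources:
 *
 *	if (atomic_dec_if_positive(&avail) < 0)
 *		return -EAGAIN;
 *
 * A negative return value means the count was already zero (or negative) and
 * was left untouched; otherwise one unit has been consumed.
 */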

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */