/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

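/*
 * Illustrative sketch (not part of the interface above): a minimal
 * message-passing pattern built on the two helpers just defined. The
 * writer publishes data with atomic_set_release(), the reader observes
 * it with atomic_read_acquire(); the RELEASE/ACQUIRE pair guarantees
 * the payload store is visible by the time the flag is seen set.
 * "payload", "ready" and the surrounding functions are hypothetical.
 *
 *	// writer				// reader
 *	payload = compute();			if (atomic_read_acquire(&ready))
 *	atomic_set_release(&ready, 1);			use(payload);
 */
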
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * In addition, if an arch has a special barrier for acquire/release, it
 * can implement its own __atomic_op_* helpers and use the same framework
 * for building the variants.
 *
 * If an architecture overrides __atomic_op_acquire() it will probably want
 * to define smp_mb__after_spinlock().
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

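/*
 * Illustrative expansion (assuming an architecture that only provides
 * atomic_add_return_relaxed()): the fully ordered atomic_add_return()
 * built by __atomic_op_fence() below expands roughly to
 *
 *	({
 *		int __ret;
 *		smp_mb__before_atomic();
 *		__ret = atomic_add_return_relaxed(i, v);
 *		smp_mb__after_atomic();
 *		__ret;
 *	})
 *
 * __atomic_op_acquire() drops the leading barrier and
 * __atomic_op_release() drops the trailing one, matching the
 * ACQUIRE/RELEASE semantics described at the top of this file.
 */
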
/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */
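
/*
 * Worked example for the fetch_* forms above: atomic_fetch_inc() returns
 * the value the counter had *before* the increment, so with v == 5 it
 * returns 5 and leaves v == 6, whereas atomic_inc_return() would return
 * the new value 6. (Values chosen purely for illustration.)
 */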

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)					\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)						\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
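
/*
 * Typical use of the try_cmpxchg() form defined above (illustrative only;
 * the bail-out condition is hypothetical): on failure it updates the
 * caller's expected value, so the usual read/compare/retry loop shrinks to
 *
 *	int old = atomic_read(&v);
 *	do {
 *		if (old == BUSY)
 *			return -EAGAIN;
 *	} while (!atomic_try_cmpxchg(&v, &old, old + 1));
 *
 * whereas the plain cmpxchg() form needs an explicit re-read of the old
 * value on every iteration.
 */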

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
#endif
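
/*
 * Illustrative use of atomic_inc_not_zero() (names are hypothetical):
 * take a reference only while the object is still live, i.e. while its
 * reference count has not already dropped to zero:
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;	// object already on its way out
 */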

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
	atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
	return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return atomic_fetch_and_release(~i, v);
}
#endif
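
/*
 * Example for the andnot fallbacks above: atomic_andnot(mask, &v) clears
 * the bits in "mask" (it is atomic_and(~mask, &v)), and
 * atomic_fetch_andnot() additionally returns the old value, so
 *
 *	old = atomic_fetch_andnot(MASK, &v);
 *	if (old & MASK)
 *		...	// at least one of the bits was previously set
 *
 * ("MASK" and the surrounding logic are purely illustrative.)
 */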

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() takes a hint of the probable
 * value of the atomic. The hint lets the processor avoid reading the
 * memory before doing the atomic read/modify/write cycle, lowering the
 * number of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
	int val, c = hint;

	/* sanity test, should be removed by compiler if hint is a constant */
	if (!hint)
		return atomic_inc_not_zero(v);

	do {
		val = atomic_cmpxchg(v, c, c + 1);
		if (val == c)
			return 1;
		c = val;
	} while (c);

	return 0;
}
#endif
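
/*
 * Illustrative call (hypothetical caller): if most objects of some type
 * are expected to sit at a refcount of 1 when they are looked up, the
 * lookup path can pass that value as the hint:
 *
 *	if (!atomic_inc_not_zero_hint(&obj->refcnt, 1))
 *		return NULL;
 */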

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
	int v, v1;
	for (v = 0; v >= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v + 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
	int v, v1;
	for (v = 0; v <= 0; v = v1) {
		v1 = atomic_cmpxchg(p, v, v - 1);
		if (likely(v1 == v))
			return 1;
	}
	return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
	int c, old, dec;
	c = atomic_read(v);
	for (;;) {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
		old = atomic_cmpxchg((v), c, dec);
		if (likely(old == c))
			break;
		c = old;
	}
	return dec;
}
#endif
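
/*
 * Illustrative use of atomic_dec_if_positive(): because it returns the
 * old value minus one even when no decrement happened, a negative result
 * means the counter was already zero (or below), e.g. for a hypothetical
 * semaphore-style trylock:
 *
 *	if (atomic_dec_if_positive(&sem_count) < 0)
 *		return -EBUSY;	// no slot was available
 */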

#define atomic_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

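/*
 * atomic_cond_read_acquire() waits until the counter satisfies the given
 * condition, with ACQUIRE ordering on the final read; as with
 * smp_cond_load_acquire(), VAL names the freshly loaded value inside the
 * condition. Illustrative wait-for-zero:
 *
 *	atomic_cond_read_acquire(&v, VAL == 0);
 */
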
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)					\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)						\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __r, __o = *__po;				\
	__r = atomic64_cmpxchg##type((_p), __o, (_n));			\
	if (unlikely(__r != __o))					\
		*__po = __r;						\
	likely(__r == __o);						\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
	atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#endif

#define atomic64_cond_read_acquire(v, c)	smp_cond_load_acquire(&(v)->counter, (c))

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */