// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

12e3d18ceeSMark Rutland #ifdef CONFIG_64BIT
13e3d18ceeSMark Rutland typedef atomic64_t atomic_long_t;
14e3d18ceeSMark Rutland #define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
15e3d18ceeSMark Rutland #define atomic_long_cond_read_acquire atomic64_cond_read_acquire
16e3d18ceeSMark Rutland #define atomic_long_cond_read_relaxed atomic64_cond_read_relaxed
17e3d18ceeSMark Rutland #else
18e3d18ceeSMark Rutland typedef atomic_t atomic_long_t;
19e3d18ceeSMark Rutland #define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
20e3d18ceeSMark Rutland #define atomic_long_cond_read_acquire atomic_cond_read_acquire
21e3d18ceeSMark Rutland #define atomic_long_cond_read_relaxed atomic_cond_read_relaxed
22e3d18ceeSMark Rutland #endif
23e3d18ceeSMark Rutland
24ad811070SMark Rutland /**
25ad811070SMark Rutland * raw_atomic_long_read() - atomic load with relaxed ordering
26ad811070SMark Rutland * @v: pointer to atomic_long_t
27ad811070SMark Rutland *
28ad811070SMark Rutland * Atomically loads the value of @v with relaxed ordering.
29ad811070SMark Rutland *
30ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_read() elsewhere.
31ad811070SMark Rutland *
32ad811070SMark Rutland * Return: The value loaded from @v.
33ad811070SMark Rutland */
3463039946SMark Rutland static __always_inline long
raw_atomic_long_read(const atomic_long_t * v)3563039946SMark Rutland raw_atomic_long_read(const atomic_long_t *v)
3663039946SMark Rutland {
37e3d18ceeSMark Rutland #ifdef CONFIG_64BIT
381815da17SMark Rutland return raw_atomic64_read(v);
3963039946SMark Rutland #else
401815da17SMark Rutland return raw_atomic_read(v);
4163039946SMark Rutland #endif
42e3d18ceeSMark Rutland }
43e3d18ceeSMark Rutland
44ad811070SMark Rutland /**
45ad811070SMark Rutland * raw_atomic_long_read_acquire() - atomic load with acquire ordering
46ad811070SMark Rutland * @v: pointer to atomic_long_t
47ad811070SMark Rutland *
48ad811070SMark Rutland * Atomically loads the value of @v with acquire ordering.
49ad811070SMark Rutland *
50ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_read_acquire() elsewhere.
51ad811070SMark Rutland *
52ad811070SMark Rutland * Return: The value loaded from @v.
53ad811070SMark Rutland */
54e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_read_acquire(const atomic_long_t * v)551815da17SMark Rutland raw_atomic_long_read_acquire(const atomic_long_t *v)
56e3d18ceeSMark Rutland {
5763039946SMark Rutland #ifdef CONFIG_64BIT
5863039946SMark Rutland return raw_atomic64_read_acquire(v);
5963039946SMark Rutland #else
601815da17SMark Rutland return raw_atomic_read_acquire(v);
6163039946SMark Rutland #endif
62e3d18ceeSMark Rutland }
63e3d18ceeSMark Rutland
64ad811070SMark Rutland /**
65ad811070SMark Rutland * raw_atomic_long_set() - atomic set with relaxed ordering
66ad811070SMark Rutland * @v: pointer to atomic_long_t
67ad811070SMark Rutland * @i: long value to assign
68ad811070SMark Rutland *
69ad811070SMark Rutland * Atomically sets @v to @i with relaxed ordering.
70ad811070SMark Rutland *
71ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_set() elsewhere.
72ad811070SMark Rutland *
73ad811070SMark Rutland * Return: Nothing.
74ad811070SMark Rutland */
75e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_set(atomic_long_t * v,long i)761815da17SMark Rutland raw_atomic_long_set(atomic_long_t *v, long i)
77e3d18ceeSMark Rutland {
7863039946SMark Rutland #ifdef CONFIG_64BIT
7963039946SMark Rutland raw_atomic64_set(v, i);
8063039946SMark Rutland #else
811815da17SMark Rutland raw_atomic_set(v, i);
8263039946SMark Rutland #endif
83e3d18ceeSMark Rutland }
84e3d18ceeSMark Rutland
85ad811070SMark Rutland /**
86ad811070SMark Rutland * raw_atomic_long_set_release() - atomic set with release ordering
87ad811070SMark Rutland * @v: pointer to atomic_long_t
88ad811070SMark Rutland * @i: long value to assign
89ad811070SMark Rutland *
90ad811070SMark Rutland * Atomically sets @v to @i with release ordering.
91ad811070SMark Rutland *
92ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_set_release() elsewhere.
93ad811070SMark Rutland *
94ad811070SMark Rutland * Return: Nothing.
95ad811070SMark Rutland */
96e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_set_release(atomic_long_t * v,long i)971815da17SMark Rutland raw_atomic_long_set_release(atomic_long_t *v, long i)
98e3d18ceeSMark Rutland {
9963039946SMark Rutland #ifdef CONFIG_64BIT
10063039946SMark Rutland raw_atomic64_set_release(v, i);
10163039946SMark Rutland #else
1021815da17SMark Rutland raw_atomic_set_release(v, i);
10363039946SMark Rutland #endif
104e3d18ceeSMark Rutland }
105e3d18ceeSMark Rutland
106ad811070SMark Rutland /**
107ad811070SMark Rutland * raw_atomic_long_add() - atomic add with relaxed ordering
108ad811070SMark Rutland * @i: long value to add
109ad811070SMark Rutland * @v: pointer to atomic_long_t
110ad811070SMark Rutland *
111ad811070SMark Rutland * Atomically updates @v to (@v + @i) with relaxed ordering.
112ad811070SMark Rutland *
113ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_add() elsewhere.
114ad811070SMark Rutland *
115ad811070SMark Rutland * Return: Nothing.
116ad811070SMark Rutland */
117e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_add(long i,atomic_long_t * v)1181815da17SMark Rutland raw_atomic_long_add(long i, atomic_long_t *v)
119e3d18ceeSMark Rutland {
12063039946SMark Rutland #ifdef CONFIG_64BIT
12163039946SMark Rutland raw_atomic64_add(i, v);
12263039946SMark Rutland #else
1231815da17SMark Rutland raw_atomic_add(i, v);
12463039946SMark Rutland #endif
125e3d18ceeSMark Rutland }
126e3d18ceeSMark Rutland
127ad811070SMark Rutland /**
128ad811070SMark Rutland * raw_atomic_long_add_return() - atomic add with full ordering
129ad811070SMark Rutland * @i: long value to add
130ad811070SMark Rutland * @v: pointer to atomic_long_t
131ad811070SMark Rutland *
132ad811070SMark Rutland * Atomically updates @v to (@v + @i) with full ordering.
133ad811070SMark Rutland *
134ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_add_return() elsewhere.
135ad811070SMark Rutland *
136ad811070SMark Rutland * Return: The updated value of @v.
137ad811070SMark Rutland */
138e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_add_return(long i,atomic_long_t * v)1391815da17SMark Rutland raw_atomic_long_add_return(long i, atomic_long_t *v)
140e3d18ceeSMark Rutland {
14163039946SMark Rutland #ifdef CONFIG_64BIT
14263039946SMark Rutland return raw_atomic64_add_return(i, v);
14363039946SMark Rutland #else
1441815da17SMark Rutland return raw_atomic_add_return(i, v);
14563039946SMark Rutland #endif
146e3d18ceeSMark Rutland }
147e3d18ceeSMark Rutland
148ad811070SMark Rutland /**
149ad811070SMark Rutland * raw_atomic_long_add_return_acquire() - atomic add with acquire ordering
150ad811070SMark Rutland * @i: long value to add
151ad811070SMark Rutland * @v: pointer to atomic_long_t
152ad811070SMark Rutland *
153ad811070SMark Rutland * Atomically updates @v to (@v + @i) with acquire ordering.
154ad811070SMark Rutland *
155ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_add_return_acquire() elsewhere.
156ad811070SMark Rutland *
157ad811070SMark Rutland * Return: The updated value of @v.
158ad811070SMark Rutland */
159e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_add_return_acquire(long i,atomic_long_t * v)1601815da17SMark Rutland raw_atomic_long_add_return_acquire(long i, atomic_long_t *v)
161e3d18ceeSMark Rutland {
16263039946SMark Rutland #ifdef CONFIG_64BIT
16363039946SMark Rutland return raw_atomic64_add_return_acquire(i, v);
16463039946SMark Rutland #else
1651815da17SMark Rutland return raw_atomic_add_return_acquire(i, v);
16663039946SMark Rutland #endif
167e3d18ceeSMark Rutland }
168e3d18ceeSMark Rutland
169ad811070SMark Rutland /**
170ad811070SMark Rutland * raw_atomic_long_add_return_release() - atomic add with release ordering
171ad811070SMark Rutland * @i: long value to add
172ad811070SMark Rutland * @v: pointer to atomic_long_t
173ad811070SMark Rutland *
174ad811070SMark Rutland * Atomically updates @v to (@v + @i) with release ordering.
175ad811070SMark Rutland *
176ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_add_return_release() elsewhere.
177ad811070SMark Rutland *
178ad811070SMark Rutland * Return: The updated value of @v.
179ad811070SMark Rutland */
180e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_add_return_release(long i,atomic_long_t * v)1811815da17SMark Rutland raw_atomic_long_add_return_release(long i, atomic_long_t *v)
182e3d18ceeSMark Rutland {
18363039946SMark Rutland #ifdef CONFIG_64BIT
18463039946SMark Rutland return raw_atomic64_add_return_release(i, v);
18563039946SMark Rutland #else
1861815da17SMark Rutland return raw_atomic_add_return_release(i, v);
18763039946SMark Rutland #endif
188e3d18ceeSMark Rutland }
189e3d18ceeSMark Rutland
190ad811070SMark Rutland /**
191ad811070SMark Rutland * raw_atomic_long_add_return_relaxed() - atomic add with relaxed ordering
192ad811070SMark Rutland * @i: long value to add
193ad811070SMark Rutland * @v: pointer to atomic_long_t
194ad811070SMark Rutland *
195ad811070SMark Rutland * Atomically updates @v to (@v + @i) with relaxed ordering.
196ad811070SMark Rutland *
197ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_add_return_relaxed() elsewhere.
198ad811070SMark Rutland *
199ad811070SMark Rutland * Return: The updated value of @v.
200ad811070SMark Rutland */
201e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_add_return_relaxed(long i,atomic_long_t * v)2021815da17SMark Rutland raw_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
203e3d18ceeSMark Rutland {
20463039946SMark Rutland #ifdef CONFIG_64BIT
20563039946SMark Rutland return raw_atomic64_add_return_relaxed(i, v);
20663039946SMark Rutland #else
2071815da17SMark Rutland return raw_atomic_add_return_relaxed(i, v);
20863039946SMark Rutland #endif
209e3d18ceeSMark Rutland }
210e3d18ceeSMark Rutland
211ad811070SMark Rutland /**
212ad811070SMark Rutland * raw_atomic_long_fetch_add() - atomic add with full ordering
213ad811070SMark Rutland * @i: long value to add
214ad811070SMark Rutland * @v: pointer to atomic_long_t
215ad811070SMark Rutland *
216ad811070SMark Rutland * Atomically updates @v to (@v + @i) with full ordering.
217ad811070SMark Rutland *
218ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_add() elsewhere.
219ad811070SMark Rutland *
220ad811070SMark Rutland * Return: The original value of @v.
221ad811070SMark Rutland */
222e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_add(long i,atomic_long_t * v)2231815da17SMark Rutland raw_atomic_long_fetch_add(long i, atomic_long_t *v)
224e3d18ceeSMark Rutland {
22563039946SMark Rutland #ifdef CONFIG_64BIT
22663039946SMark Rutland return raw_atomic64_fetch_add(i, v);
22763039946SMark Rutland #else
2281815da17SMark Rutland return raw_atomic_fetch_add(i, v);
22963039946SMark Rutland #endif
230e3d18ceeSMark Rutland }
231e3d18ceeSMark Rutland
232ad811070SMark Rutland /**
233ad811070SMark Rutland * raw_atomic_long_fetch_add_acquire() - atomic add with acquire ordering
234ad811070SMark Rutland * @i: long value to add
235ad811070SMark Rutland * @v: pointer to atomic_long_t
236ad811070SMark Rutland *
237ad811070SMark Rutland * Atomically updates @v to (@v + @i) with acquire ordering.
238ad811070SMark Rutland *
239ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_add_acquire() elsewhere.
240ad811070SMark Rutland *
241ad811070SMark Rutland * Return: The original value of @v.
242ad811070SMark Rutland */
243e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_add_acquire(long i,atomic_long_t * v)2441815da17SMark Rutland raw_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
245e3d18ceeSMark Rutland {
24663039946SMark Rutland #ifdef CONFIG_64BIT
24763039946SMark Rutland return raw_atomic64_fetch_add_acquire(i, v);
24863039946SMark Rutland #else
2491815da17SMark Rutland return raw_atomic_fetch_add_acquire(i, v);
25063039946SMark Rutland #endif
251e3d18ceeSMark Rutland }
252e3d18ceeSMark Rutland
253ad811070SMark Rutland /**
254ad811070SMark Rutland * raw_atomic_long_fetch_add_release() - atomic add with release ordering
255ad811070SMark Rutland * @i: long value to add
256ad811070SMark Rutland * @v: pointer to atomic_long_t
257ad811070SMark Rutland *
258ad811070SMark Rutland * Atomically updates @v to (@v + @i) with release ordering.
259ad811070SMark Rutland *
260ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_add_release() elsewhere.
261ad811070SMark Rutland *
262ad811070SMark Rutland * Return: The original value of @v.
263ad811070SMark Rutland */
264e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_add_release(long i,atomic_long_t * v)2651815da17SMark Rutland raw_atomic_long_fetch_add_release(long i, atomic_long_t *v)
266e3d18ceeSMark Rutland {
26763039946SMark Rutland #ifdef CONFIG_64BIT
26863039946SMark Rutland return raw_atomic64_fetch_add_release(i, v);
26963039946SMark Rutland #else
2701815da17SMark Rutland return raw_atomic_fetch_add_release(i, v);
27163039946SMark Rutland #endif
272e3d18ceeSMark Rutland }
273e3d18ceeSMark Rutland
274ad811070SMark Rutland /**
275ad811070SMark Rutland * raw_atomic_long_fetch_add_relaxed() - atomic add with relaxed ordering
276ad811070SMark Rutland * @i: long value to add
277ad811070SMark Rutland * @v: pointer to atomic_long_t
278ad811070SMark Rutland *
279ad811070SMark Rutland * Atomically updates @v to (@v + @i) with relaxed ordering.
280ad811070SMark Rutland *
281ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_add_relaxed() elsewhere.
282ad811070SMark Rutland *
283ad811070SMark Rutland * Return: The original value of @v.
284ad811070SMark Rutland */
285e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_add_relaxed(long i,atomic_long_t * v)2861815da17SMark Rutland raw_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
287e3d18ceeSMark Rutland {
28863039946SMark Rutland #ifdef CONFIG_64BIT
28963039946SMark Rutland return raw_atomic64_fetch_add_relaxed(i, v);
29063039946SMark Rutland #else
2911815da17SMark Rutland return raw_atomic_fetch_add_relaxed(i, v);
29263039946SMark Rutland #endif
293e3d18ceeSMark Rutland }
294e3d18ceeSMark Rutland
295ad811070SMark Rutland /**
296ad811070SMark Rutland * raw_atomic_long_sub() - atomic subtract with relaxed ordering
297ad811070SMark Rutland * @i: long value to subtract
298ad811070SMark Rutland * @v: pointer to atomic_long_t
299ad811070SMark Rutland *
300ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
301ad811070SMark Rutland *
302ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_sub() elsewhere.
303ad811070SMark Rutland *
304ad811070SMark Rutland * Return: Nothing.
305ad811070SMark Rutland */
306e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_sub(long i,atomic_long_t * v)3071815da17SMark Rutland raw_atomic_long_sub(long i, atomic_long_t *v)
308e3d18ceeSMark Rutland {
30963039946SMark Rutland #ifdef CONFIG_64BIT
31063039946SMark Rutland raw_atomic64_sub(i, v);
31163039946SMark Rutland #else
3121815da17SMark Rutland raw_atomic_sub(i, v);
31363039946SMark Rutland #endif
314e3d18ceeSMark Rutland }
315e3d18ceeSMark Rutland
316ad811070SMark Rutland /**
317ad811070SMark Rutland * raw_atomic_long_sub_return() - atomic subtract with full ordering
318ad811070SMark Rutland * @i: long value to subtract
319ad811070SMark Rutland * @v: pointer to atomic_long_t
320ad811070SMark Rutland *
321ad811070SMark Rutland * Atomically updates @v to (@v - @i) with full ordering.
322ad811070SMark Rutland *
323ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_sub_return() elsewhere.
324ad811070SMark Rutland *
325ad811070SMark Rutland * Return: The updated value of @v.
326ad811070SMark Rutland */
327e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_sub_return(long i,atomic_long_t * v)3281815da17SMark Rutland raw_atomic_long_sub_return(long i, atomic_long_t *v)
329e3d18ceeSMark Rutland {
33063039946SMark Rutland #ifdef CONFIG_64BIT
33163039946SMark Rutland return raw_atomic64_sub_return(i, v);
33263039946SMark Rutland #else
3331815da17SMark Rutland return raw_atomic_sub_return(i, v);
33463039946SMark Rutland #endif
335e3d18ceeSMark Rutland }
336e3d18ceeSMark Rutland
337ad811070SMark Rutland /**
338ad811070SMark Rutland * raw_atomic_long_sub_return_acquire() - atomic subtract with acquire ordering
339ad811070SMark Rutland * @i: long value to subtract
340ad811070SMark Rutland * @v: pointer to atomic_long_t
341ad811070SMark Rutland *
342ad811070SMark Rutland * Atomically updates @v to (@v - @i) with acquire ordering.
343ad811070SMark Rutland *
344ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_sub_return_acquire() elsewhere.
345ad811070SMark Rutland *
346ad811070SMark Rutland * Return: The updated value of @v.
347ad811070SMark Rutland */
348e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_sub_return_acquire(long i,atomic_long_t * v)3491815da17SMark Rutland raw_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
350e3d18ceeSMark Rutland {
35163039946SMark Rutland #ifdef CONFIG_64BIT
35263039946SMark Rutland return raw_atomic64_sub_return_acquire(i, v);
35363039946SMark Rutland #else
3541815da17SMark Rutland return raw_atomic_sub_return_acquire(i, v);
35563039946SMark Rutland #endif
356e3d18ceeSMark Rutland }
357e3d18ceeSMark Rutland
358ad811070SMark Rutland /**
359ad811070SMark Rutland * raw_atomic_long_sub_return_release() - atomic subtract with release ordering
360ad811070SMark Rutland * @i: long value to subtract
361ad811070SMark Rutland * @v: pointer to atomic_long_t
362ad811070SMark Rutland *
363ad811070SMark Rutland * Atomically updates @v to (@v - @i) with release ordering.
364ad811070SMark Rutland *
365ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_sub_return_release() elsewhere.
366ad811070SMark Rutland *
367ad811070SMark Rutland * Return: The updated value of @v.
368ad811070SMark Rutland */
369e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_sub_return_release(long i,atomic_long_t * v)3701815da17SMark Rutland raw_atomic_long_sub_return_release(long i, atomic_long_t *v)
371e3d18ceeSMark Rutland {
37263039946SMark Rutland #ifdef CONFIG_64BIT
37363039946SMark Rutland return raw_atomic64_sub_return_release(i, v);
37463039946SMark Rutland #else
3751815da17SMark Rutland return raw_atomic_sub_return_release(i, v);
37663039946SMark Rutland #endif
377e3d18ceeSMark Rutland }
378e3d18ceeSMark Rutland
379ad811070SMark Rutland /**
380ad811070SMark Rutland * raw_atomic_long_sub_return_relaxed() - atomic subtract with relaxed ordering
381ad811070SMark Rutland * @i: long value to subtract
382ad811070SMark Rutland * @v: pointer to atomic_long_t
383ad811070SMark Rutland *
384ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
385ad811070SMark Rutland *
386ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_sub_return_relaxed() elsewhere.
387ad811070SMark Rutland *
388ad811070SMark Rutland * Return: The updated value of @v.
389ad811070SMark Rutland */
390e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_sub_return_relaxed(long i,atomic_long_t * v)3911815da17SMark Rutland raw_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
392e3d18ceeSMark Rutland {
39363039946SMark Rutland #ifdef CONFIG_64BIT
39463039946SMark Rutland return raw_atomic64_sub_return_relaxed(i, v);
39563039946SMark Rutland #else
3961815da17SMark Rutland return raw_atomic_sub_return_relaxed(i, v);
39763039946SMark Rutland #endif
398e3d18ceeSMark Rutland }
399e3d18ceeSMark Rutland
400ad811070SMark Rutland /**
401ad811070SMark Rutland * raw_atomic_long_fetch_sub() - atomic subtract with full ordering
402ad811070SMark Rutland * @i: long value to subtract
403ad811070SMark Rutland * @v: pointer to atomic_long_t
404ad811070SMark Rutland *
405ad811070SMark Rutland * Atomically updates @v to (@v - @i) with full ordering.
406ad811070SMark Rutland *
407ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_sub() elsewhere.
408ad811070SMark Rutland *
409ad811070SMark Rutland * Return: The original value of @v.
410ad811070SMark Rutland */
411e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_sub(long i,atomic_long_t * v)4121815da17SMark Rutland raw_atomic_long_fetch_sub(long i, atomic_long_t *v)
413e3d18ceeSMark Rutland {
41463039946SMark Rutland #ifdef CONFIG_64BIT
41563039946SMark Rutland return raw_atomic64_fetch_sub(i, v);
41663039946SMark Rutland #else
4171815da17SMark Rutland return raw_atomic_fetch_sub(i, v);
41863039946SMark Rutland #endif
419e3d18ceeSMark Rutland }
420e3d18ceeSMark Rutland
421ad811070SMark Rutland /**
422ad811070SMark Rutland * raw_atomic_long_fetch_sub_acquire() - atomic subtract with acquire ordering
423ad811070SMark Rutland * @i: long value to subtract
424ad811070SMark Rutland * @v: pointer to atomic_long_t
425ad811070SMark Rutland *
426ad811070SMark Rutland * Atomically updates @v to (@v - @i) with acquire ordering.
427ad811070SMark Rutland *
428ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_sub_acquire() elsewhere.
429ad811070SMark Rutland *
430ad811070SMark Rutland * Return: The original value of @v.
431ad811070SMark Rutland */
432e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_sub_acquire(long i,atomic_long_t * v)4331815da17SMark Rutland raw_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
434e3d18ceeSMark Rutland {
43563039946SMark Rutland #ifdef CONFIG_64BIT
43663039946SMark Rutland return raw_atomic64_fetch_sub_acquire(i, v);
43763039946SMark Rutland #else
4381815da17SMark Rutland return raw_atomic_fetch_sub_acquire(i, v);
43963039946SMark Rutland #endif
440e3d18ceeSMark Rutland }
441e3d18ceeSMark Rutland
442ad811070SMark Rutland /**
443ad811070SMark Rutland * raw_atomic_long_fetch_sub_release() - atomic subtract with release ordering
444ad811070SMark Rutland * @i: long value to subtract
445ad811070SMark Rutland * @v: pointer to atomic_long_t
446ad811070SMark Rutland *
447ad811070SMark Rutland * Atomically updates @v to (@v - @i) with release ordering.
448ad811070SMark Rutland *
449ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_sub_release() elsewhere.
450ad811070SMark Rutland *
451ad811070SMark Rutland * Return: The original value of @v.
452ad811070SMark Rutland */
453e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_sub_release(long i,atomic_long_t * v)4541815da17SMark Rutland raw_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
455e3d18ceeSMark Rutland {
45663039946SMark Rutland #ifdef CONFIG_64BIT
45763039946SMark Rutland return raw_atomic64_fetch_sub_release(i, v);
45863039946SMark Rutland #else
4591815da17SMark Rutland return raw_atomic_fetch_sub_release(i, v);
46063039946SMark Rutland #endif
461e3d18ceeSMark Rutland }
462e3d18ceeSMark Rutland
463ad811070SMark Rutland /**
464ad811070SMark Rutland * raw_atomic_long_fetch_sub_relaxed() - atomic subtract with relaxed ordering
465ad811070SMark Rutland * @i: long value to subtract
466ad811070SMark Rutland * @v: pointer to atomic_long_t
467ad811070SMark Rutland *
468ad811070SMark Rutland * Atomically updates @v to (@v - @i) with relaxed ordering.
469ad811070SMark Rutland *
470ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_sub_relaxed() elsewhere.
471ad811070SMark Rutland *
472ad811070SMark Rutland * Return: The original value of @v.
473ad811070SMark Rutland */
474e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_sub_relaxed(long i,atomic_long_t * v)4751815da17SMark Rutland raw_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
476e3d18ceeSMark Rutland {
47763039946SMark Rutland #ifdef CONFIG_64BIT
47863039946SMark Rutland return raw_atomic64_fetch_sub_relaxed(i, v);
47963039946SMark Rutland #else
4801815da17SMark Rutland return raw_atomic_fetch_sub_relaxed(i, v);
48163039946SMark Rutland #endif
482e3d18ceeSMark Rutland }
483e3d18ceeSMark Rutland
484ad811070SMark Rutland /**
485ad811070SMark Rutland * raw_atomic_long_inc() - atomic increment with relaxed ordering
486ad811070SMark Rutland * @v: pointer to atomic_long_t
487ad811070SMark Rutland *
488ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
489ad811070SMark Rutland *
490ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_inc() elsewhere.
491ad811070SMark Rutland *
492ad811070SMark Rutland * Return: Nothing.
493ad811070SMark Rutland */
494e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_inc(atomic_long_t * v)4951815da17SMark Rutland raw_atomic_long_inc(atomic_long_t *v)
496e3d18ceeSMark Rutland {
49763039946SMark Rutland #ifdef CONFIG_64BIT
49863039946SMark Rutland raw_atomic64_inc(v);
49963039946SMark Rutland #else
5001815da17SMark Rutland raw_atomic_inc(v);
50163039946SMark Rutland #endif
502e3d18ceeSMark Rutland }
503e3d18ceeSMark Rutland
504ad811070SMark Rutland /**
505ad811070SMark Rutland * raw_atomic_long_inc_return() - atomic increment with full ordering
506ad811070SMark Rutland * @v: pointer to atomic_long_t
507ad811070SMark Rutland *
508ad811070SMark Rutland * Atomically updates @v to (@v + 1) with full ordering.
509ad811070SMark Rutland *
510ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_inc_return() elsewhere.
511ad811070SMark Rutland *
512ad811070SMark Rutland * Return: The updated value of @v.
513ad811070SMark Rutland */
514e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_inc_return(atomic_long_t * v)5151815da17SMark Rutland raw_atomic_long_inc_return(atomic_long_t *v)
516e3d18ceeSMark Rutland {
51763039946SMark Rutland #ifdef CONFIG_64BIT
51863039946SMark Rutland return raw_atomic64_inc_return(v);
51963039946SMark Rutland #else
5201815da17SMark Rutland return raw_atomic_inc_return(v);
52163039946SMark Rutland #endif
522e3d18ceeSMark Rutland }
523e3d18ceeSMark Rutland
524ad811070SMark Rutland /**
525ad811070SMark Rutland * raw_atomic_long_inc_return_acquire() - atomic increment with acquire ordering
526ad811070SMark Rutland * @v: pointer to atomic_long_t
527ad811070SMark Rutland *
528ad811070SMark Rutland * Atomically updates @v to (@v + 1) with acquire ordering.
529ad811070SMark Rutland *
530ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_inc_return_acquire() elsewhere.
531ad811070SMark Rutland *
532ad811070SMark Rutland * Return: The updated value of @v.
533ad811070SMark Rutland */
534e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_inc_return_acquire(atomic_long_t * v)5351815da17SMark Rutland raw_atomic_long_inc_return_acquire(atomic_long_t *v)
536e3d18ceeSMark Rutland {
53763039946SMark Rutland #ifdef CONFIG_64BIT
53863039946SMark Rutland return raw_atomic64_inc_return_acquire(v);
53963039946SMark Rutland #else
5401815da17SMark Rutland return raw_atomic_inc_return_acquire(v);
54163039946SMark Rutland #endif
542e3d18ceeSMark Rutland }
543e3d18ceeSMark Rutland
544ad811070SMark Rutland /**
545ad811070SMark Rutland * raw_atomic_long_inc_return_release() - atomic increment with release ordering
546ad811070SMark Rutland * @v: pointer to atomic_long_t
547ad811070SMark Rutland *
548ad811070SMark Rutland * Atomically updates @v to (@v + 1) with release ordering.
549ad811070SMark Rutland *
550ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_inc_return_release() elsewhere.
551ad811070SMark Rutland *
552ad811070SMark Rutland * Return: The updated value of @v.
553ad811070SMark Rutland */
554e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_inc_return_release(atomic_long_t * v)5551815da17SMark Rutland raw_atomic_long_inc_return_release(atomic_long_t *v)
556e3d18ceeSMark Rutland {
55763039946SMark Rutland #ifdef CONFIG_64BIT
55863039946SMark Rutland return raw_atomic64_inc_return_release(v);
55963039946SMark Rutland #else
5601815da17SMark Rutland return raw_atomic_inc_return_release(v);
56163039946SMark Rutland #endif
562e3d18ceeSMark Rutland }
563e3d18ceeSMark Rutland
564ad811070SMark Rutland /**
565ad811070SMark Rutland * raw_atomic_long_inc_return_relaxed() - atomic increment with relaxed ordering
566ad811070SMark Rutland * @v: pointer to atomic_long_t
567ad811070SMark Rutland *
568ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
569ad811070SMark Rutland *
570ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_inc_return_relaxed() elsewhere.
571ad811070SMark Rutland *
572ad811070SMark Rutland * Return: The updated value of @v.
573ad811070SMark Rutland */
574e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_inc_return_relaxed(atomic_long_t * v)5751815da17SMark Rutland raw_atomic_long_inc_return_relaxed(atomic_long_t *v)
576e3d18ceeSMark Rutland {
57763039946SMark Rutland #ifdef CONFIG_64BIT
57863039946SMark Rutland return raw_atomic64_inc_return_relaxed(v);
57963039946SMark Rutland #else
5801815da17SMark Rutland return raw_atomic_inc_return_relaxed(v);
58163039946SMark Rutland #endif
582e3d18ceeSMark Rutland }
583e3d18ceeSMark Rutland
584ad811070SMark Rutland /**
585ad811070SMark Rutland * raw_atomic_long_fetch_inc() - atomic increment with full ordering
586ad811070SMark Rutland * @v: pointer to atomic_long_t
587ad811070SMark Rutland *
588ad811070SMark Rutland * Atomically updates @v to (@v + 1) with full ordering.
589ad811070SMark Rutland *
590ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_inc() elsewhere.
591ad811070SMark Rutland *
592ad811070SMark Rutland * Return: The original value of @v.
593ad811070SMark Rutland */
594e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_inc(atomic_long_t * v)5951815da17SMark Rutland raw_atomic_long_fetch_inc(atomic_long_t *v)
596e3d18ceeSMark Rutland {
59763039946SMark Rutland #ifdef CONFIG_64BIT
59863039946SMark Rutland return raw_atomic64_fetch_inc(v);
59963039946SMark Rutland #else
6001815da17SMark Rutland return raw_atomic_fetch_inc(v);
60163039946SMark Rutland #endif
602e3d18ceeSMark Rutland }
603e3d18ceeSMark Rutland
604ad811070SMark Rutland /**
605ad811070SMark Rutland * raw_atomic_long_fetch_inc_acquire() - atomic increment with acquire ordering
606ad811070SMark Rutland * @v: pointer to atomic_long_t
607ad811070SMark Rutland *
608ad811070SMark Rutland * Atomically updates @v to (@v + 1) with acquire ordering.
609ad811070SMark Rutland *
610ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_inc_acquire() elsewhere.
611ad811070SMark Rutland *
612ad811070SMark Rutland * Return: The original value of @v.
613ad811070SMark Rutland */
614e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_inc_acquire(atomic_long_t * v)6151815da17SMark Rutland raw_atomic_long_fetch_inc_acquire(atomic_long_t *v)
616e3d18ceeSMark Rutland {
61763039946SMark Rutland #ifdef CONFIG_64BIT
61863039946SMark Rutland return raw_atomic64_fetch_inc_acquire(v);
61963039946SMark Rutland #else
6201815da17SMark Rutland return raw_atomic_fetch_inc_acquire(v);
62163039946SMark Rutland #endif
622e3d18ceeSMark Rutland }
623e3d18ceeSMark Rutland
624ad811070SMark Rutland /**
625ad811070SMark Rutland * raw_atomic_long_fetch_inc_release() - atomic increment with release ordering
626ad811070SMark Rutland * @v: pointer to atomic_long_t
627ad811070SMark Rutland *
628ad811070SMark Rutland * Atomically updates @v to (@v + 1) with release ordering.
629ad811070SMark Rutland *
630ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_inc_release() elsewhere.
631ad811070SMark Rutland *
632ad811070SMark Rutland * Return: The original value of @v.
633ad811070SMark Rutland */
634e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_inc_release(atomic_long_t * v)6351815da17SMark Rutland raw_atomic_long_fetch_inc_release(atomic_long_t *v)
636e3d18ceeSMark Rutland {
63763039946SMark Rutland #ifdef CONFIG_64BIT
63863039946SMark Rutland return raw_atomic64_fetch_inc_release(v);
63963039946SMark Rutland #else
6401815da17SMark Rutland return raw_atomic_fetch_inc_release(v);
64163039946SMark Rutland #endif
642e3d18ceeSMark Rutland }
643e3d18ceeSMark Rutland
644ad811070SMark Rutland /**
645ad811070SMark Rutland * raw_atomic_long_fetch_inc_relaxed() - atomic increment with relaxed ordering
646ad811070SMark Rutland * @v: pointer to atomic_long_t
647ad811070SMark Rutland *
648ad811070SMark Rutland * Atomically updates @v to (@v + 1) with relaxed ordering.
649ad811070SMark Rutland *
650ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_inc_relaxed() elsewhere.
651ad811070SMark Rutland *
652ad811070SMark Rutland * Return: The original value of @v.
653ad811070SMark Rutland */
654e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_inc_relaxed(atomic_long_t * v)6551815da17SMark Rutland raw_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
656e3d18ceeSMark Rutland {
65763039946SMark Rutland #ifdef CONFIG_64BIT
65863039946SMark Rutland return raw_atomic64_fetch_inc_relaxed(v);
65963039946SMark Rutland #else
6601815da17SMark Rutland return raw_atomic_fetch_inc_relaxed(v);
66163039946SMark Rutland #endif
662e3d18ceeSMark Rutland }
663e3d18ceeSMark Rutland
664ad811070SMark Rutland /**
665ad811070SMark Rutland * raw_atomic_long_dec() - atomic decrement with relaxed ordering
666ad811070SMark Rutland * @v: pointer to atomic_long_t
667ad811070SMark Rutland *
668ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
669ad811070SMark Rutland *
670ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_dec() elsewhere.
671ad811070SMark Rutland *
672ad811070SMark Rutland * Return: Nothing.
673ad811070SMark Rutland */
674e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_dec(atomic_long_t * v)6751815da17SMark Rutland raw_atomic_long_dec(atomic_long_t *v)
676e3d18ceeSMark Rutland {
67763039946SMark Rutland #ifdef CONFIG_64BIT
67863039946SMark Rutland raw_atomic64_dec(v);
67963039946SMark Rutland #else
6801815da17SMark Rutland raw_atomic_dec(v);
68163039946SMark Rutland #endif
682e3d18ceeSMark Rutland }
683e3d18ceeSMark Rutland
684ad811070SMark Rutland /**
685ad811070SMark Rutland * raw_atomic_long_dec_return() - atomic decrement with full ordering
686ad811070SMark Rutland * @v: pointer to atomic_long_t
687ad811070SMark Rutland *
688ad811070SMark Rutland * Atomically updates @v to (@v - 1) with full ordering.
689ad811070SMark Rutland *
690ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_dec_return() elsewhere.
691ad811070SMark Rutland *
692ad811070SMark Rutland * Return: The updated value of @v.
693ad811070SMark Rutland */
694e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_dec_return(atomic_long_t * v)6951815da17SMark Rutland raw_atomic_long_dec_return(atomic_long_t *v)
696e3d18ceeSMark Rutland {
69763039946SMark Rutland #ifdef CONFIG_64BIT
69863039946SMark Rutland return raw_atomic64_dec_return(v);
69963039946SMark Rutland #else
7001815da17SMark Rutland return raw_atomic_dec_return(v);
70163039946SMark Rutland #endif
702e3d18ceeSMark Rutland }
703e3d18ceeSMark Rutland
704ad811070SMark Rutland /**
705ad811070SMark Rutland * raw_atomic_long_dec_return_acquire() - atomic decrement with acquire ordering
706ad811070SMark Rutland * @v: pointer to atomic_long_t
707ad811070SMark Rutland *
708ad811070SMark Rutland * Atomically updates @v to (@v - 1) with acquire ordering.
709ad811070SMark Rutland *
710ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_dec_return_acquire() elsewhere.
711ad811070SMark Rutland *
712ad811070SMark Rutland * Return: The updated value of @v.
713ad811070SMark Rutland */
714e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_dec_return_acquire(atomic_long_t * v)7151815da17SMark Rutland raw_atomic_long_dec_return_acquire(atomic_long_t *v)
716e3d18ceeSMark Rutland {
71763039946SMark Rutland #ifdef CONFIG_64BIT
71863039946SMark Rutland return raw_atomic64_dec_return_acquire(v);
71963039946SMark Rutland #else
7201815da17SMark Rutland return raw_atomic_dec_return_acquire(v);
72163039946SMark Rutland #endif
722e3d18ceeSMark Rutland }
723e3d18ceeSMark Rutland
724ad811070SMark Rutland /**
725ad811070SMark Rutland * raw_atomic_long_dec_return_release() - atomic decrement with release ordering
726ad811070SMark Rutland * @v: pointer to atomic_long_t
727ad811070SMark Rutland *
728ad811070SMark Rutland * Atomically updates @v to (@v - 1) with release ordering.
729ad811070SMark Rutland *
730ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_dec_return_release() elsewhere.
731ad811070SMark Rutland *
732ad811070SMark Rutland * Return: The updated value of @v.
733ad811070SMark Rutland */
734e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_dec_return_release(atomic_long_t * v)7351815da17SMark Rutland raw_atomic_long_dec_return_release(atomic_long_t *v)
736e3d18ceeSMark Rutland {
73763039946SMark Rutland #ifdef CONFIG_64BIT
73863039946SMark Rutland return raw_atomic64_dec_return_release(v);
73963039946SMark Rutland #else
7401815da17SMark Rutland return raw_atomic_dec_return_release(v);
74163039946SMark Rutland #endif
742e3d18ceeSMark Rutland }
743e3d18ceeSMark Rutland
744ad811070SMark Rutland /**
745ad811070SMark Rutland * raw_atomic_long_dec_return_relaxed() - atomic decrement with relaxed ordering
746ad811070SMark Rutland * @v: pointer to atomic_long_t
747ad811070SMark Rutland *
748ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
749ad811070SMark Rutland *
750ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_dec_return_relaxed() elsewhere.
751ad811070SMark Rutland *
752ad811070SMark Rutland * Return: The updated value of @v.
753ad811070SMark Rutland */
754e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_dec_return_relaxed(atomic_long_t * v)7551815da17SMark Rutland raw_atomic_long_dec_return_relaxed(atomic_long_t *v)
756e3d18ceeSMark Rutland {
75763039946SMark Rutland #ifdef CONFIG_64BIT
75863039946SMark Rutland return raw_atomic64_dec_return_relaxed(v);
75963039946SMark Rutland #else
7601815da17SMark Rutland return raw_atomic_dec_return_relaxed(v);
76163039946SMark Rutland #endif
762e3d18ceeSMark Rutland }
763e3d18ceeSMark Rutland
764ad811070SMark Rutland /**
765ad811070SMark Rutland * raw_atomic_long_fetch_dec() - atomic decrement with full ordering
766ad811070SMark Rutland * @v: pointer to atomic_long_t
767ad811070SMark Rutland *
768ad811070SMark Rutland * Atomically updates @v to (@v - 1) with full ordering.
769ad811070SMark Rutland *
770ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_dec() elsewhere.
771ad811070SMark Rutland *
772ad811070SMark Rutland * Return: The original value of @v.
773ad811070SMark Rutland */
774e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_dec(atomic_long_t * v)7751815da17SMark Rutland raw_atomic_long_fetch_dec(atomic_long_t *v)
776e3d18ceeSMark Rutland {
77763039946SMark Rutland #ifdef CONFIG_64BIT
77863039946SMark Rutland return raw_atomic64_fetch_dec(v);
77963039946SMark Rutland #else
7801815da17SMark Rutland return raw_atomic_fetch_dec(v);
78163039946SMark Rutland #endif
782e3d18ceeSMark Rutland }
783e3d18ceeSMark Rutland
784ad811070SMark Rutland /**
785ad811070SMark Rutland * raw_atomic_long_fetch_dec_acquire() - atomic decrement with acquire ordering
786ad811070SMark Rutland * @v: pointer to atomic_long_t
787ad811070SMark Rutland *
788ad811070SMark Rutland * Atomically updates @v to (@v - 1) with acquire ordering.
789ad811070SMark Rutland *
790ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_dec_acquire() elsewhere.
791ad811070SMark Rutland *
792ad811070SMark Rutland * Return: The original value of @v.
793ad811070SMark Rutland */
794e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_dec_acquire(atomic_long_t * v)7951815da17SMark Rutland raw_atomic_long_fetch_dec_acquire(atomic_long_t *v)
796e3d18ceeSMark Rutland {
79763039946SMark Rutland #ifdef CONFIG_64BIT
79863039946SMark Rutland return raw_atomic64_fetch_dec_acquire(v);
79963039946SMark Rutland #else
8001815da17SMark Rutland return raw_atomic_fetch_dec_acquire(v);
80163039946SMark Rutland #endif
802e3d18ceeSMark Rutland }
803e3d18ceeSMark Rutland
804ad811070SMark Rutland /**
805ad811070SMark Rutland * raw_atomic_long_fetch_dec_release() - atomic decrement with release ordering
806ad811070SMark Rutland * @v: pointer to atomic_long_t
807ad811070SMark Rutland *
808ad811070SMark Rutland * Atomically updates @v to (@v - 1) with release ordering.
809ad811070SMark Rutland *
810ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_dec_release() elsewhere.
811ad811070SMark Rutland *
812ad811070SMark Rutland * Return: The original value of @v.
813ad811070SMark Rutland */
814e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_dec_release(atomic_long_t * v)8151815da17SMark Rutland raw_atomic_long_fetch_dec_release(atomic_long_t *v)
816e3d18ceeSMark Rutland {
81763039946SMark Rutland #ifdef CONFIG_64BIT
81863039946SMark Rutland return raw_atomic64_fetch_dec_release(v);
81963039946SMark Rutland #else
8201815da17SMark Rutland return raw_atomic_fetch_dec_release(v);
82163039946SMark Rutland #endif
822e3d18ceeSMark Rutland }
823e3d18ceeSMark Rutland
824ad811070SMark Rutland /**
825ad811070SMark Rutland * raw_atomic_long_fetch_dec_relaxed() - atomic decrement with relaxed ordering
826ad811070SMark Rutland * @v: pointer to atomic_long_t
827ad811070SMark Rutland *
828ad811070SMark Rutland * Atomically updates @v to (@v - 1) with relaxed ordering.
829ad811070SMark Rutland *
830ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_dec_relaxed() elsewhere.
831ad811070SMark Rutland *
832ad811070SMark Rutland * Return: The original value of @v.
833ad811070SMark Rutland */
834e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_dec_relaxed(atomic_long_t * v)8351815da17SMark Rutland raw_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
836e3d18ceeSMark Rutland {
83763039946SMark Rutland #ifdef CONFIG_64BIT
83863039946SMark Rutland return raw_atomic64_fetch_dec_relaxed(v);
83963039946SMark Rutland #else
8401815da17SMark Rutland return raw_atomic_fetch_dec_relaxed(v);
84163039946SMark Rutland #endif
842e3d18ceeSMark Rutland }
843e3d18ceeSMark Rutland
844ad811070SMark Rutland /**
845ad811070SMark Rutland * raw_atomic_long_and() - atomic bitwise AND with relaxed ordering
846ad811070SMark Rutland * @i: long value
847ad811070SMark Rutland * @v: pointer to atomic_long_t
848ad811070SMark Rutland *
849ad811070SMark Rutland * Atomically updates @v to (@v & @i) with relaxed ordering.
850ad811070SMark Rutland *
851ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_and() elsewhere.
852ad811070SMark Rutland *
853ad811070SMark Rutland * Return: Nothing.
854ad811070SMark Rutland */
855e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_and(long i,atomic_long_t * v)8561815da17SMark Rutland raw_atomic_long_and(long i, atomic_long_t *v)
857e3d18ceeSMark Rutland {
85863039946SMark Rutland #ifdef CONFIG_64BIT
85963039946SMark Rutland raw_atomic64_and(i, v);
86063039946SMark Rutland #else
8611815da17SMark Rutland raw_atomic_and(i, v);
86263039946SMark Rutland #endif
863e3d18ceeSMark Rutland }
864e3d18ceeSMark Rutland
865ad811070SMark Rutland /**
866ad811070SMark Rutland * raw_atomic_long_fetch_and() - atomic bitwise AND with full ordering
867ad811070SMark Rutland * @i: long value
868ad811070SMark Rutland * @v: pointer to atomic_long_t
869ad811070SMark Rutland *
870ad811070SMark Rutland * Atomically updates @v to (@v & @i) with full ordering.
871ad811070SMark Rutland *
872ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_and() elsewhere.
873ad811070SMark Rutland *
874ad811070SMark Rutland * Return: The original value of @v.
875ad811070SMark Rutland */
876e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_and(long i,atomic_long_t * v)8771815da17SMark Rutland raw_atomic_long_fetch_and(long i, atomic_long_t *v)
878e3d18ceeSMark Rutland {
87963039946SMark Rutland #ifdef CONFIG_64BIT
88063039946SMark Rutland return raw_atomic64_fetch_and(i, v);
88163039946SMark Rutland #else
8821815da17SMark Rutland return raw_atomic_fetch_and(i, v);
88363039946SMark Rutland #endif
884e3d18ceeSMark Rutland }
885e3d18ceeSMark Rutland
886ad811070SMark Rutland /**
887ad811070SMark Rutland * raw_atomic_long_fetch_and_acquire() - atomic bitwise AND with acquire ordering
888ad811070SMark Rutland * @i: long value
889ad811070SMark Rutland * @v: pointer to atomic_long_t
890ad811070SMark Rutland *
891ad811070SMark Rutland * Atomically updates @v to (@v & @i) with acquire ordering.
892ad811070SMark Rutland *
893ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_and_acquire() elsewhere.
894ad811070SMark Rutland *
895ad811070SMark Rutland * Return: The original value of @v.
896ad811070SMark Rutland */
897e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_and_acquire(long i,atomic_long_t * v)8981815da17SMark Rutland raw_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
899e3d18ceeSMark Rutland {
90063039946SMark Rutland #ifdef CONFIG_64BIT
90163039946SMark Rutland return raw_atomic64_fetch_and_acquire(i, v);
90263039946SMark Rutland #else
9031815da17SMark Rutland return raw_atomic_fetch_and_acquire(i, v);
90463039946SMark Rutland #endif
905e3d18ceeSMark Rutland }
906e3d18ceeSMark Rutland
907ad811070SMark Rutland /**
908ad811070SMark Rutland * raw_atomic_long_fetch_and_release() - atomic bitwise AND with release ordering
909ad811070SMark Rutland * @i: long value
910ad811070SMark Rutland * @v: pointer to atomic_long_t
911ad811070SMark Rutland *
912ad811070SMark Rutland * Atomically updates @v to (@v & @i) with release ordering.
913ad811070SMark Rutland *
914ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_and_release() elsewhere.
915ad811070SMark Rutland *
916ad811070SMark Rutland * Return: The original value of @v.
917ad811070SMark Rutland */
918e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_and_release(long i,atomic_long_t * v)9191815da17SMark Rutland raw_atomic_long_fetch_and_release(long i, atomic_long_t *v)
920e3d18ceeSMark Rutland {
92163039946SMark Rutland #ifdef CONFIG_64BIT
92263039946SMark Rutland return raw_atomic64_fetch_and_release(i, v);
92363039946SMark Rutland #else
9241815da17SMark Rutland return raw_atomic_fetch_and_release(i, v);
92563039946SMark Rutland #endif
926e3d18ceeSMark Rutland }
927e3d18ceeSMark Rutland
928ad811070SMark Rutland /**
929ad811070SMark Rutland * raw_atomic_long_fetch_and_relaxed() - atomic bitwise AND with relaxed ordering
930ad811070SMark Rutland * @i: long value
931ad811070SMark Rutland * @v: pointer to atomic_long_t
932ad811070SMark Rutland *
933ad811070SMark Rutland * Atomically updates @v to (@v & @i) with relaxed ordering.
934ad811070SMark Rutland *
935ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_and_relaxed() elsewhere.
936ad811070SMark Rutland *
937ad811070SMark Rutland * Return: The original value of @v.
938ad811070SMark Rutland */
939e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_and_relaxed(long i,atomic_long_t * v)9401815da17SMark Rutland raw_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
941e3d18ceeSMark Rutland {
94263039946SMark Rutland #ifdef CONFIG_64BIT
94363039946SMark Rutland return raw_atomic64_fetch_and_relaxed(i, v);
94463039946SMark Rutland #else
9451815da17SMark Rutland return raw_atomic_fetch_and_relaxed(i, v);
94663039946SMark Rutland #endif
947e3d18ceeSMark Rutland }
948e3d18ceeSMark Rutland
949ad811070SMark Rutland /**
950ad811070SMark Rutland * raw_atomic_long_andnot() - atomic bitwise AND NOT with relaxed ordering
951ad811070SMark Rutland * @i: long value
952ad811070SMark Rutland * @v: pointer to atomic_long_t
953ad811070SMark Rutland *
954ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with relaxed ordering.
955ad811070SMark Rutland *
956ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_andnot() elsewhere.
957ad811070SMark Rutland *
958ad811070SMark Rutland * Return: Nothing.
959ad811070SMark Rutland */
960e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_andnot(long i,atomic_long_t * v)9611815da17SMark Rutland raw_atomic_long_andnot(long i, atomic_long_t *v)
962e3d18ceeSMark Rutland {
96363039946SMark Rutland #ifdef CONFIG_64BIT
96463039946SMark Rutland raw_atomic64_andnot(i, v);
96563039946SMark Rutland #else
9661815da17SMark Rutland raw_atomic_andnot(i, v);
96763039946SMark Rutland #endif
968e3d18ceeSMark Rutland }
969e3d18ceeSMark Rutland
970ad811070SMark Rutland /**
971ad811070SMark Rutland * raw_atomic_long_fetch_andnot() - atomic bitwise AND NOT with full ordering
972ad811070SMark Rutland * @i: long value
973ad811070SMark Rutland * @v: pointer to atomic_long_t
974ad811070SMark Rutland *
975ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with full ordering.
976ad811070SMark Rutland *
977ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_andnot() elsewhere.
978ad811070SMark Rutland *
979ad811070SMark Rutland * Return: The original value of @v.
980ad811070SMark Rutland */
981e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_andnot(long i,atomic_long_t * v)9821815da17SMark Rutland raw_atomic_long_fetch_andnot(long i, atomic_long_t *v)
983e3d18ceeSMark Rutland {
98463039946SMark Rutland #ifdef CONFIG_64BIT
98563039946SMark Rutland return raw_atomic64_fetch_andnot(i, v);
98663039946SMark Rutland #else
9871815da17SMark Rutland return raw_atomic_fetch_andnot(i, v);
98863039946SMark Rutland #endif
989e3d18ceeSMark Rutland }
990e3d18ceeSMark Rutland
991ad811070SMark Rutland /**
992ad811070SMark Rutland * raw_atomic_long_fetch_andnot_acquire() - atomic bitwise AND NOT with acquire ordering
993ad811070SMark Rutland * @i: long value
994ad811070SMark Rutland * @v: pointer to atomic_long_t
995ad811070SMark Rutland *
996ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with acquire ordering.
997ad811070SMark Rutland *
998ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_acquire() elsewhere.
999ad811070SMark Rutland *
1000ad811070SMark Rutland * Return: The original value of @v.
1001ad811070SMark Rutland */
1002e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_andnot_acquire(long i,atomic_long_t * v)10031815da17SMark Rutland raw_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
1004e3d18ceeSMark Rutland {
100563039946SMark Rutland #ifdef CONFIG_64BIT
100663039946SMark Rutland return raw_atomic64_fetch_andnot_acquire(i, v);
100763039946SMark Rutland #else
10081815da17SMark Rutland return raw_atomic_fetch_andnot_acquire(i, v);
100963039946SMark Rutland #endif
1010e3d18ceeSMark Rutland }
1011e3d18ceeSMark Rutland
1012ad811070SMark Rutland /**
1013ad811070SMark Rutland * raw_atomic_long_fetch_andnot_release() - atomic bitwise AND NOT with release ordering
1014ad811070SMark Rutland * @i: long value
1015ad811070SMark Rutland * @v: pointer to atomic_long_t
1016ad811070SMark Rutland *
1017ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with release ordering.
1018ad811070SMark Rutland *
1019ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_release() elsewhere.
1020ad811070SMark Rutland *
1021ad811070SMark Rutland * Return: The original value of @v.
1022ad811070SMark Rutland */
1023e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_andnot_release(long i,atomic_long_t * v)10241815da17SMark Rutland raw_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
1025e3d18ceeSMark Rutland {
102663039946SMark Rutland #ifdef CONFIG_64BIT
102763039946SMark Rutland return raw_atomic64_fetch_andnot_release(i, v);
102863039946SMark Rutland #else
10291815da17SMark Rutland return raw_atomic_fetch_andnot_release(i, v);
103063039946SMark Rutland #endif
1031e3d18ceeSMark Rutland }
1032e3d18ceeSMark Rutland
1033ad811070SMark Rutland /**
1034ad811070SMark Rutland * raw_atomic_long_fetch_andnot_relaxed() - atomic bitwise AND NOT with relaxed ordering
1035ad811070SMark Rutland * @i: long value
1036ad811070SMark Rutland * @v: pointer to atomic_long_t
1037ad811070SMark Rutland *
1038ad811070SMark Rutland * Atomically updates @v to (@v & ~@i) with relaxed ordering.
1039ad811070SMark Rutland *
1040ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_andnot_relaxed() elsewhere.
1041ad811070SMark Rutland *
1042ad811070SMark Rutland * Return: The original value of @v.
1043ad811070SMark Rutland */
1044e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_andnot_relaxed(long i,atomic_long_t * v)10451815da17SMark Rutland raw_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
1046e3d18ceeSMark Rutland {
104763039946SMark Rutland #ifdef CONFIG_64BIT
104863039946SMark Rutland return raw_atomic64_fetch_andnot_relaxed(i, v);
104963039946SMark Rutland #else
10501815da17SMark Rutland return raw_atomic_fetch_andnot_relaxed(i, v);
105163039946SMark Rutland #endif
1052e3d18ceeSMark Rutland }
1053e3d18ceeSMark Rutland
1054ad811070SMark Rutland /**
1055ad811070SMark Rutland * raw_atomic_long_or() - atomic bitwise OR with relaxed ordering
1056ad811070SMark Rutland * @i: long value
1057ad811070SMark Rutland * @v: pointer to atomic_long_t
1058ad811070SMark Rutland *
1059ad811070SMark Rutland * Atomically updates @v to (@v | @i) with relaxed ordering.
1060ad811070SMark Rutland *
1061ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_or() elsewhere.
1062ad811070SMark Rutland *
1063ad811070SMark Rutland * Return: Nothing.
1064ad811070SMark Rutland */
1065e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_or(long i,atomic_long_t * v)10661815da17SMark Rutland raw_atomic_long_or(long i, atomic_long_t *v)
1067e3d18ceeSMark Rutland {
106863039946SMark Rutland #ifdef CONFIG_64BIT
106963039946SMark Rutland raw_atomic64_or(i, v);
107063039946SMark Rutland #else
10711815da17SMark Rutland raw_atomic_or(i, v);
107263039946SMark Rutland #endif
1073e3d18ceeSMark Rutland }
1074e3d18ceeSMark Rutland
1075ad811070SMark Rutland /**
1076ad811070SMark Rutland * raw_atomic_long_fetch_or() - atomic bitwise OR with full ordering
1077ad811070SMark Rutland * @i: long value
1078ad811070SMark Rutland * @v: pointer to atomic_long_t
1079ad811070SMark Rutland *
1080ad811070SMark Rutland * Atomically updates @v to (@v | @i) with full ordering.
1081ad811070SMark Rutland *
1082ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_or() elsewhere.
1083ad811070SMark Rutland *
1084ad811070SMark Rutland * Return: The original value of @v.
1085ad811070SMark Rutland */
1086e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_or(long i,atomic_long_t * v)10871815da17SMark Rutland raw_atomic_long_fetch_or(long i, atomic_long_t *v)
1088e3d18ceeSMark Rutland {
108963039946SMark Rutland #ifdef CONFIG_64BIT
109063039946SMark Rutland return raw_atomic64_fetch_or(i, v);
109163039946SMark Rutland #else
10921815da17SMark Rutland return raw_atomic_fetch_or(i, v);
109363039946SMark Rutland #endif
1094e3d18ceeSMark Rutland }
1095e3d18ceeSMark Rutland
1096ad811070SMark Rutland /**
1097ad811070SMark Rutland * raw_atomic_long_fetch_or_acquire() - atomic bitwise OR with acquire ordering
1098ad811070SMark Rutland * @i: long value
1099ad811070SMark Rutland * @v: pointer to atomic_long_t
1100ad811070SMark Rutland *
1101ad811070SMark Rutland * Atomically updates @v to (@v | @i) with acquire ordering.
1102ad811070SMark Rutland *
1103ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_or_acquire() elsewhere.
1104ad811070SMark Rutland *
1105ad811070SMark Rutland * Return: The original value of @v.
1106ad811070SMark Rutland */
1107e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_or_acquire(long i,atomic_long_t * v)11081815da17SMark Rutland raw_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
1109e3d18ceeSMark Rutland {
111063039946SMark Rutland #ifdef CONFIG_64BIT
111163039946SMark Rutland return raw_atomic64_fetch_or_acquire(i, v);
111263039946SMark Rutland #else
11131815da17SMark Rutland return raw_atomic_fetch_or_acquire(i, v);
111463039946SMark Rutland #endif
1115e3d18ceeSMark Rutland }
1116e3d18ceeSMark Rutland
1117ad811070SMark Rutland /**
1118ad811070SMark Rutland * raw_atomic_long_fetch_or_release() - atomic bitwise OR with release ordering
1119ad811070SMark Rutland * @i: long value
1120ad811070SMark Rutland * @v: pointer to atomic_long_t
1121ad811070SMark Rutland *
1122ad811070SMark Rutland * Atomically updates @v to (@v | @i) with release ordering.
1123ad811070SMark Rutland *
1124ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_or_release() elsewhere.
1125ad811070SMark Rutland *
1126ad811070SMark Rutland * Return: The original value of @v.
1127ad811070SMark Rutland */
1128e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_or_release(long i,atomic_long_t * v)11291815da17SMark Rutland raw_atomic_long_fetch_or_release(long i, atomic_long_t *v)
1130e3d18ceeSMark Rutland {
113163039946SMark Rutland #ifdef CONFIG_64BIT
113263039946SMark Rutland return raw_atomic64_fetch_or_release(i, v);
113363039946SMark Rutland #else
11341815da17SMark Rutland return raw_atomic_fetch_or_release(i, v);
113563039946SMark Rutland #endif
1136e3d18ceeSMark Rutland }
1137e3d18ceeSMark Rutland
1138ad811070SMark Rutland /**
1139ad811070SMark Rutland * raw_atomic_long_fetch_or_relaxed() - atomic bitwise OR with relaxed ordering
1140ad811070SMark Rutland * @i: long value
1141ad811070SMark Rutland * @v: pointer to atomic_long_t
1142ad811070SMark Rutland *
1143ad811070SMark Rutland * Atomically updates @v to (@v | @i) with relaxed ordering.
1144ad811070SMark Rutland *
1145ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_or_relaxed() elsewhere.
1146ad811070SMark Rutland *
1147ad811070SMark Rutland * Return: The original value of @v.
1148ad811070SMark Rutland */
1149e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_or_relaxed(long i,atomic_long_t * v)11501815da17SMark Rutland raw_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
1151e3d18ceeSMark Rutland {
115263039946SMark Rutland #ifdef CONFIG_64BIT
115363039946SMark Rutland return raw_atomic64_fetch_or_relaxed(i, v);
115463039946SMark Rutland #else
11551815da17SMark Rutland return raw_atomic_fetch_or_relaxed(i, v);
115663039946SMark Rutland #endif
1157e3d18ceeSMark Rutland }
1158e3d18ceeSMark Rutland
1159ad811070SMark Rutland /**
1160ad811070SMark Rutland * raw_atomic_long_xor() - atomic bitwise XOR with relaxed ordering
1161ad811070SMark Rutland * @i: long value
1162ad811070SMark Rutland * @v: pointer to atomic_long_t
1163ad811070SMark Rutland *
1164ad811070SMark Rutland * Atomically updates @v to (@v ^ @i) with relaxed ordering.
1165ad811070SMark Rutland *
1166ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_xor() elsewhere.
1167ad811070SMark Rutland *
1168ad811070SMark Rutland * Return: Nothing.
1169ad811070SMark Rutland */
1170e3d18ceeSMark Rutland static __always_inline void
raw_atomic_long_xor(long i,atomic_long_t * v)11711815da17SMark Rutland raw_atomic_long_xor(long i, atomic_long_t *v)
1172e3d18ceeSMark Rutland {
117363039946SMark Rutland #ifdef CONFIG_64BIT
117463039946SMark Rutland raw_atomic64_xor(i, v);
117563039946SMark Rutland #else
11761815da17SMark Rutland raw_atomic_xor(i, v);
117763039946SMark Rutland #endif
1178e3d18ceeSMark Rutland }
1179e3d18ceeSMark Rutland
1180ad811070SMark Rutland /**
1181ad811070SMark Rutland * raw_atomic_long_fetch_xor() - atomic bitwise XOR with full ordering
1182ad811070SMark Rutland * @i: long value
1183ad811070SMark Rutland * @v: pointer to atomic_long_t
1184ad811070SMark Rutland *
1185ad811070SMark Rutland * Atomically updates @v to (@v ^ @i) with full ordering.
1186ad811070SMark Rutland *
1187ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_xor() elsewhere.
1188ad811070SMark Rutland *
1189ad811070SMark Rutland * Return: The original value of @v.
1190ad811070SMark Rutland */
1191e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_xor(long i,atomic_long_t * v)11921815da17SMark Rutland raw_atomic_long_fetch_xor(long i, atomic_long_t *v)
1193e3d18ceeSMark Rutland {
119463039946SMark Rutland #ifdef CONFIG_64BIT
119563039946SMark Rutland return raw_atomic64_fetch_xor(i, v);
119663039946SMark Rutland #else
11971815da17SMark Rutland return raw_atomic_fetch_xor(i, v);
119863039946SMark Rutland #endif
1199e3d18ceeSMark Rutland }
1200e3d18ceeSMark Rutland
1201ad811070SMark Rutland /**
1202ad811070SMark Rutland * raw_atomic_long_fetch_xor_acquire() - atomic bitwise XOR with acquire ordering
1203ad811070SMark Rutland * @i: long value
1204ad811070SMark Rutland * @v: pointer to atomic_long_t
1205ad811070SMark Rutland *
1206ad811070SMark Rutland * Atomically updates @v to (@v ^ @i) with acquire ordering.
1207ad811070SMark Rutland *
1208ad811070SMark Rutland * Safe to use in noinstr code; prefer atomic_long_fetch_xor_acquire() elsewhere.
1209ad811070SMark Rutland *
1210ad811070SMark Rutland * Return: The original value of @v.
1211ad811070SMark Rutland */
1212e3d18ceeSMark Rutland static __always_inline long
raw_atomic_long_fetch_xor_acquire(long i,atomic_long_t * v)12131815da17SMark Rutland raw_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
1214e3d18ceeSMark Rutland {
121563039946SMark Rutland #ifdef CONFIG_64BIT
121663039946SMark Rutland return raw_atomic64_fetch_xor_acquire(i, v);
121763039946SMark Rutland #else
12181815da17SMark Rutland return raw_atomic_fetch_xor_acquire(i, v);
121963039946SMark Rutland #endif
1220e3d18ceeSMark Rutland }
1221e3d18ceeSMark Rutland
/**
 * raw_atomic_long_fetch_xor_release() - atomic bitwise XOR with release ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_release(i, v);
#else
	return raw_atomic_fetch_xor_release(i, v);
#endif
}
1242e3d18ceeSMark Rutland
/**
 * raw_atomic_long_fetch_xor_relaxed() - atomic bitwise XOR with relaxed ordering
 * @i: long value
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v ^ @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_xor_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_xor_relaxed(i, v);
#else
	return raw_atomic_fetch_xor_relaxed(i, v);
#endif
}
1263e3d18ceeSMark Rutland
/**
 * raw_atomic_long_xchg() - atomic exchange with full ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg(v, new);
#else
	return raw_atomic_xchg(v, new);
#endif
}
1284e3d18ceeSMark Rutland
/**
 * raw_atomic_long_xchg_acquire() - atomic exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_acquire(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_acquire(v, new);
#else
	return raw_atomic_xchg_acquire(v, new);
#endif
}
1305e3d18ceeSMark Rutland
/**
 * raw_atomic_long_xchg_release() - atomic exchange with release ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_release(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_release(v, new);
#else
	return raw_atomic_xchg_release(v, new);
#endif
}
1326e3d18ceeSMark Rutland
/**
 * raw_atomic_long_xchg_relaxed() - atomic exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @new: long value to assign
 *
 * Atomically updates @v to @new with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_xchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_xchg_relaxed(atomic_long_t *v, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_xchg_relaxed(v, new);
#else
	return raw_atomic_xchg_relaxed(v, new);
#endif
}
1347e3d18ceeSMark Rutland
/**
 * raw_atomic_long_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg(v, old, new);
#else
	return raw_atomic_cmpxchg(v, old, new);
#endif
}
1370e3d18ceeSMark Rutland
/**
 * raw_atomic_long_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_acquire() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_acquire(v, old, new);
#else
	return raw_atomic_cmpxchg_acquire(v, old, new);
#endif
}
1393e3d18ceeSMark Rutland
/**
 * raw_atomic_long_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_release() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_release(v, old, new);
#else
	return raw_atomic_cmpxchg_release(v, old, new);
#endif
}
1416e3d18ceeSMark Rutland
/**
 * raw_atomic_long_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_cmpxchg_relaxed() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_cmpxchg_relaxed(v, old, new);
#else
	return raw_atomic_cmpxchg_relaxed(v, old, new);
#endif
}
1439e3d18ceeSMark Rutland
/**
 * raw_atomic_long_try_cmpxchg() - atomic compare and exchange with full ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with full ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg(v, (int *)old, new);
#endif
}
1463e3d18ceeSMark Rutland
/**
 * raw_atomic_long_try_cmpxchg_acquire() - atomic compare and exchange with acquire ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with acquire ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_acquire() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_acquire(v, (int *)old, new);
#endif
}
1487e3d18ceeSMark Rutland
/**
 * raw_atomic_long_try_cmpxchg_release() - atomic compare and exchange with release ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with release ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_release() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_release(v, (int *)old, new);
#endif
}
1511e3d18ceeSMark Rutland
/**
 * raw_atomic_long_try_cmpxchg_relaxed() - atomic compare and exchange with relaxed ordering
 * @v: pointer to atomic_long_t
 * @old: pointer to long value to compare with
 * @new: long value to assign
 *
 * If (@v == @old), atomically updates @v to @new with relaxed ordering.
 * Otherwise, @v is not modified, @old is updated to the current value of @v,
 * and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_try_cmpxchg_relaxed() elsewhere.
 *
 * Return: @true if the exchange occurred, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
#else
	return raw_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
#endif
}
1535e3d18ceeSMark Rutland
/**
 * raw_atomic_long_sub_and_test() - atomic subtract and test if zero with full ordering
 * @i: long value to subtract
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_sub_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_sub_and_test(i, v);
#else
	return raw_atomic_sub_and_test(i, v);
#endif
}
1556e3d18ceeSMark Rutland
/**
 * raw_atomic_long_dec_and_test() - atomic decrement and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v - 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_and_test(v);
#else
	return raw_atomic_dec_and_test(v);
#endif
}
1576e3d18ceeSMark Rutland
/**
 * raw_atomic_long_inc_and_test() - atomic increment and test if zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + 1) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_and_test() elsewhere.
 *
 * Return: @true if the resulting value of @v is zero, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_and_test(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_and_test(v);
#else
	return raw_atomic_inc_and_test(v);
#endif
}
1596e3d18ceeSMark Rutland
/**
 * raw_atomic_long_add_negative() - atomic add and test if negative with full ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with full ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative(i, v);
#else
	return raw_atomic_add_negative(i, v);
#endif
}
1617e3d18ceeSMark Rutland
/**
 * raw_atomic_long_add_negative_acquire() - atomic add and test if negative with acquire ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with acquire ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_acquire() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_acquire(i, v);
#else
	return raw_atomic_add_negative_acquire(i, v);
#endif
}
1638e5ab9effSThomas Gleixner
/**
 * raw_atomic_long_add_negative_release() - atomic add and test if negative with release ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with release ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_release() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_release(i, v);
#else
	return raw_atomic_add_negative_release(i, v);
#endif
}
1659e5ab9effSThomas Gleixner
/**
 * raw_atomic_long_add_negative_relaxed() - atomic add and test if negative with relaxed ordering
 * @i: long value to add
 * @v: pointer to atomic_long_t
 *
 * Atomically updates @v to (@v + @i) with relaxed ordering.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_negative_relaxed() elsewhere.
 *
 * Return: @true if the resulting value of @v is negative, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_negative_relaxed(i, v);
#else
	return raw_atomic_add_negative_relaxed(i, v);
#endif
}
1680e5ab9effSThomas Gleixner
/**
 * raw_atomic_long_fetch_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_fetch_add_unless() elsewhere.
 *
 * Return: The original value of @v.
 */
static __always_inline long
raw_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_fetch_add_unless(v, a, u);
#else
	return raw_atomic_fetch_add_unless(v, a, u);
#endif
}
1703e3d18ceeSMark Rutland
/**
 * raw_atomic_long_add_unless() - atomic add unless value with full ordering
 * @v: pointer to atomic_long_t
 * @a: long value to add
 * @u: long value to compare with
 *
 * If (@v != @u), atomically updates @v to (@v + @a) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_add_unless() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_add_unless(v, a, u);
#else
	return raw_atomic_add_unless(v, a, u);
#endif
}
1726e3d18ceeSMark Rutland
/**
 * raw_atomic_long_inc_not_zero() - atomic increment unless zero with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v != 0), atomically updates @v to (@v + 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_not_zero() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_not_zero(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_not_zero(v);
#else
	return raw_atomic_inc_not_zero(v);
#endif
}
1747e3d18ceeSMark Rutland
/**
 * raw_atomic_long_inc_unless_negative() - atomic increment unless negative with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v >= 0), atomically updates @v to (@v + 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_inc_unless_negative() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_inc_unless_negative(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_inc_unless_negative(v);
#else
	return raw_atomic_inc_unless_negative(v);
#endif
}
1768e3d18ceeSMark Rutland
/**
 * raw_atomic_long_dec_unless_positive() - atomic decrement unless positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v <= 0), atomically updates @v to (@v - 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_unless_positive() elsewhere.
 *
 * Return: @true if @v was updated, @false otherwise.
 */
static __always_inline bool
raw_atomic_long_dec_unless_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_unless_positive(v);
#else
	return raw_atomic_dec_unless_positive(v);
#endif
}
1789e3d18ceeSMark Rutland
/**
 * raw_atomic_long_dec_if_positive() - atomic decrement if positive with full ordering
 * @v: pointer to atomic_long_t
 *
 * If (@v > 0), atomically updates @v to (@v - 1) with full ordering.
 * Otherwise, @v is not modified and relaxed ordering is provided.
 *
 * Safe to use in noinstr code; prefer atomic_long_dec_if_positive() elsewhere.
 *
 * Return: The old value of (@v - 1), regardless of whether @v was updated.
 */
static __always_inline long
raw_atomic_long_dec_if_positive(atomic_long_t *v)
{
#ifdef CONFIG_64BIT
	return raw_atomic64_dec_if_positive(v);
#else
	return raw_atomic_dec_if_positive(v);
#endif
}
1810e3d18ceeSMark Rutland
1811e3d18ceeSMark Rutland #endif /* _LINUX_ATOMIC_LONG_H */
1812*f92a59f6SCarlos Llamas // eadf183c3600b8b92b91839dd3be6bcc560c752d
1813