xref: /f-stack/freebsd/mips/include/atomic.h (revision 22ce4aff)
1a9643ea8Slogwang /*-
2*22ce4affSfengbojiang  * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
3*22ce4affSfengbojiang  *
4a9643ea8Slogwang  * Copyright (c) 1998 Doug Rabson
5a9643ea8Slogwang  * All rights reserved.
6a9643ea8Slogwang  *
7a9643ea8Slogwang  * Redistribution and use in source and binary forms, with or without
8a9643ea8Slogwang  * modification, are permitted provided that the following conditions
9a9643ea8Slogwang  * are met:
10a9643ea8Slogwang  * 1. Redistributions of source code must retain the above copyright
11a9643ea8Slogwang  *    notice, this list of conditions and the following disclaimer.
12a9643ea8Slogwang  * 2. Redistributions in binary form must reproduce the above copyright
13a9643ea8Slogwang  *    notice, this list of conditions and the following disclaimer in the
14a9643ea8Slogwang  *    documentation and/or other materials provided with the distribution.
15a9643ea8Slogwang  *
16a9643ea8Slogwang  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
17a9643ea8Slogwang  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18a9643ea8Slogwang  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
19a9643ea8Slogwang  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
20a9643ea8Slogwang  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
21a9643ea8Slogwang  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
22a9643ea8Slogwang  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
23a9643ea8Slogwang  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
24a9643ea8Slogwang  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25a9643ea8Slogwang  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
26a9643ea8Slogwang  * SUCH DAMAGE.
27a9643ea8Slogwang  *
28a9643ea8Slogwang  *	from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
29a9643ea8Slogwang  * $FreeBSD$
30a9643ea8Slogwang  */
31a9643ea8Slogwang 
32a9643ea8Slogwang #ifndef _MACHINE_ATOMIC_H_
33a9643ea8Slogwang #define	_MACHINE_ATOMIC_H_
34a9643ea8Slogwang 
35a9643ea8Slogwang #ifndef _SYS_CDEFS_H_
36a9643ea8Slogwang #error this file needs sys/cdefs.h as a prerequisite
37a9643ea8Slogwang #endif
38a9643ea8Slogwang 
39*22ce4affSfengbojiang #include <sys/atomic_common.h>
40*22ce4affSfengbojiang 
41*22ce4affSfengbojiang #if !defined(__mips_n64) && !defined(__mips_n32)
42*22ce4affSfengbojiang #include <sys/_atomic64e.h>
43*22ce4affSfengbojiang #endif
44*22ce4affSfengbojiang 
45a9643ea8Slogwang /*
46a9643ea8Slogwang  * Note: All the 64-bit atomic operations are only atomic when running
47a9643ea8Slogwang  * in 64-bit mode.  It is assumed that code compiled for n32 and n64
48a9643ea8Slogwang  * fits into this definition and no further safeties are needed.
49a9643ea8Slogwang  *
50a9643ea8Slogwang  * It is also assumed that the add, subtract and other arithmetic is
51a9643ea8Slogwang  * done on numbers not pointers.  The special rules for n32 pointers
52a9643ea8Slogwang  * do not have atomic operations defined for them, but generally shouldn't
53a9643ea8Slogwang  * need atomic operations.
54a9643ea8Slogwang  */
/*
 * Platforms may predefine this to extra nop padding emitted after the
 * "sync" in mips_sync() below; the default is no padding.
 */
#ifndef __MIPS_PLATFORM_SYNC_NOPS
#define __MIPS_PLATFORM_SYNC_NOPS ""
#endif
58a9643ea8Slogwang 
59a9643ea8Slogwang static __inline  void
mips_sync(void)60a9643ea8Slogwang mips_sync(void)
61a9643ea8Slogwang {
62a9643ea8Slogwang 	__asm __volatile (".set noreorder\n"
63a9643ea8Slogwang 			"\tsync\n"
64a9643ea8Slogwang 			__MIPS_PLATFORM_SYNC_NOPS
65a9643ea8Slogwang 			".set reorder\n"
66a9643ea8Slogwang 			: : : "memory");
67a9643ea8Slogwang }
68a9643ea8Slogwang 
/* All barrier flavors (full/write/read) are the same full sync here. */
#define mb()	mips_sync()
#define wmb()	mips_sync()
#define rmb()	mips_sync()
72a9643ea8Slogwang 
73a9643ea8Slogwang /*
74a9643ea8Slogwang  * Various simple arithmetic on memory which is atomic in the presence
75a9643ea8Slogwang  * of interrupts and SMP safe.
76a9643ea8Slogwang  */
77a9643ea8Slogwang 
/* Sub-word (8/16-bit) arithmetic primitives are implemented out of line. */
void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);

/* Forward declarations; inline definitions presumably follow elsewhere. */
static __inline int atomic_cmpset_8(__volatile uint8_t *, uint8_t, uint8_t);
static __inline int atomic_fcmpset_8(__volatile uint8_t *, uint8_t *, uint8_t);
static __inline int atomic_cmpset_16(__volatile uint16_t *, uint16_t, uint16_t);
static __inline int atomic_fcmpset_16(__volatile uint16_t *, uint16_t *, uint16_t);

/*
 * Atomically set the bits of 'v' in '*p' (*p |= v) via an ll/sc retry
 * loop.  No memory barrier is implied; use the _acq/_rel variants for
 * ordering.
 */
static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");

}
108a9643ea8Slogwang 
/*
 * Atomically clear the bits of 'v' in '*p' (*p &= ~v); the complement
 * is computed once outside the ll/sc retry loop.  No barrier implied.
 */
static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;
	v = ~v;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
124a9643ea8Slogwang 
/*
 * Atomically add 'v' to '*p' (*p += v) via an ll/sc retry loop.
 * No barrier implied.
 */
static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"addu	%0, %2, %0\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
139a9643ea8Slogwang 
/*
 * Atomically subtract 'v' from '*p' (*p -= v) via an ll/sc retry loop.
 * No barrier implied.
 */
static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll	%0, %3\n\t"		/* load old value */
		"subu	%0, %2\n\t"		/* calculate new value */
		"sc	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
154a9643ea8Slogwang 
/*
 * Atomically swap 0 into '*addr' and return the previous value
 * (an atomic exchange with zero).
 */
static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
	uint32_t result,temp;

	__asm __volatile (
		"1:\tll	 %0,%3\n\t"	/* load current value, asserting lock */
		"li	 %1,0\n\t"		/* value to store */
		"sc	 %1,%2\n\t"	/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return result;
}
171a9643ea8Slogwang 
/*
 * Atomically swap 'value' into '*addr' and return the previous value.
 * Despite the name this is a plain exchange: "or %1,$0,%4" just copies
 * 'value' into the register that gets stored; the old value is not
 * OR-ed in.
 */
static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
	uint32_t result,temp;

	__asm __volatile (
		"1:\tll	 %0,%3\n\t"	/* load current value, asserting lock */
		"or      %1,$0,%4\n\t"
		"sc	 %1,%2\n\t"	/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return result;
}
188a9643ea8Slogwang 
189a9643ea8Slogwang #if defined(__mips_n64) || defined(__mips_n32)
/*
 * 64-bit (lld/scd) counterpart of atomic_set_32: *p |= v.
 * Per the note at the top of this file, only atomic when running in
 * 64-bit mode (n32/n64).
 */
static __inline void
atomic_set_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"or	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");

}
206a9643ea8Slogwang 
/*
 * 64-bit (lld/scd) counterpart of atomic_clear_32: *p &= ~v.
 */
static __inline void
atomic_clear_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;
	v = ~v;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"and	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
223a9643ea8Slogwang 
/*
 * 64-bit (lld/scd, daddu) counterpart of atomic_add_32: *p += v.
 */
static __inline void
atomic_add_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"daddu	%0, %2, %0\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
239a9643ea8Slogwang 
/*
 * 64-bit (lld/scd, dsubu) counterpart of atomic_subtract_32: *p -= v.
 */
static __inline void
atomic_subtract_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %3\n\t"		/* load old value */
		"dsubu	%0, %2\n\t"		/* calculate new value */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
255a9643ea8Slogwang 
/*
 * Atomically swap 0 into '*addr' and return the previous 64-bit value.
 */
static __inline uint64_t
atomic_readandclear_64(__volatile uint64_t *addr)
{
	uint64_t result,temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0, %3\n\t"		/* load old value */
		"li	 %1, 0\n\t"		/* value to store */
		"scd	 %1, %2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return result;
}
273a9643ea8Slogwang 
/*
 * Atomically swap 'value' into '*addr' and return the previous 64-bit
 * value.  As with atomic_readandset_32, this is a plain exchange:
 * "or %1,$0,%4" only copies 'value'; the old value is not OR-ed in.
 */
static __inline uint64_t
atomic_readandset_64(__volatile uint64_t *addr, uint64_t value)
{
	uint64_t result,temp;

	__asm __volatile (
		"1:\n\t"
		"lld	 %0,%3\n\t"		/* Load old value*/
		"or      %1,$0,%4\n\t"
		"scd	 %1,%2\n\t"		/* attempt to store */
		"beqz	 %1, 1b\n\t"		/* if the store failed, spin */
		: "=&r"(result), "=&r"(temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return result;
}
291a9643ea8Slogwang #endif
292a9643ea8Slogwang 
/*
 * Generate _acq_/_rel_ variants of a simple arithmetic op by
 * bracketing the relaxed op with a full sync: acquire syncs after
 * the operation, release syncs before it.
 */
#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline  void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	mips_sync(); 							\
}									\
									\
static __inline  void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	mips_sync();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
307a9643ea8Slogwang 
/*
 * Variants of simple arithmetic with memory barriers: instantiate the
 * _acq_/_rel_ forms of set/clear/add/subtract for every width (64-bit
 * only where the ABI supports 64-bit registers).
 */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL
329a9643ea8Slogwang 
330a9643ea8Slogwang /*
331a9643ea8Slogwang  * We assume that a = b will do atomic loads and stores.
332a9643ea8Slogwang  */
/*
 * Generate load_acq/store_rel: a plain (assumed-atomic) access paired
 * with a full sync — after the load for acquire, before the store for
 * release.
 */
#define	ATOMIC_STORE_LOAD(WIDTH)			\
static __inline  uint##WIDTH##_t			\
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)	\
{							\
	uint##WIDTH##_t v;				\
							\
	v = *p;						\
	mips_sync();					\
	return (v);					\
}							\
							\
static __inline  void					\
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{							\
	mips_sync();					\
	*p = v;						\
}
350a9643ea8Slogwang 
/* 64-bit load/store variants only where 64-bit registers exist. */
ATOMIC_STORE_LOAD(32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_STORE_LOAD(64)
#endif
#undef ATOMIC_STORE_LOAD
356a9643ea8Slogwang 
/*
 * MIPS n32 is not an LP64 ABI, so atomic_load_64 isn't defined there.
 * Define it here since n32 is an oddball !LP64 ABI that can nonetheless
 * do 64-bit atomics; the acquire form is used as the plain load.
 */
#if defined(__mips_n32)
#define	atomic_load_64	atomic_load_acq_64
#endif
364*22ce4affSfengbojiang 
365*22ce4affSfengbojiang /*
366a9643ea8Slogwang  * Atomically compare the value stored at *p with cmpval and if the
367a9643ea8Slogwang  * two values are equal, update the value of *p with newval. Returns
368a9643ea8Slogwang  * zero if the compare failed, nonzero otherwise.
369a9643ea8Slogwang  */
/*
 * Looping compare-and-set: retries internally if the sc fails, so a
 * zero return means the comparison itself failed.  On success the
 * return value is the (nonzero) sc result.
 */
static __inline int
atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	int ret;

	__asm __volatile (
		"1:\tll	%0, %4\n\t"		/* load old value */
		"bne %0, %2, 2f\n\t"		/* compare */
		"move %0, %3\n\t"		/* value to store */
		"sc %0, %1\n\t"			/* attempt to store */
		"beqz %0, 1b\n\t"		/* if it failed, spin */
		"j 3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return ret;
}
391a9643ea8Slogwang 
/*
 * Atomically compare the value stored at *p with the value pointed to
 * by cmpval; if they are equal, attempt to update *p with newval.
 * Returns nonzero if the update succeeded.  Otherwise returns zero: on
 * a comparison failure the observed value of *p is stored into
 * *cmpval, while a store-conditional failure leaves *cmpval untouched.
 */
/*
 * Single-attempt compare-and-set: returns nonzero iff the store
 * succeeded.  On comparison failure the observed value is written back
 * to *cmpval; on sc failure it returns 0 with *cmpval untouched (see
 * the comment below).
 */
static __inline int
atomic_fcmpset_32(__volatile uint32_t *p, uint32_t *cmpval, uint32_t newval)
{
	int ret;

	/*
	 * The following sequence (similar to that in atomic_fcmpset_64) will
	 * attempt to update the value of *p with newval if the comparison
	 * succeeds.  Note that they'll exit regardless of whether the store
	 * actually succeeded, leaving *cmpval untouched.  This is in line with
	 * the documentation of atomic_fcmpset_<type>() in atomic(9) for ll/sc
	 * architectures.
	 */
	__asm __volatile (
		"ll	%0, %1\n\t"		/* load old value */
		"bne	%0, %4, 1f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"sc	%0, %1\n\t"		/* attempt to store */
		"j	2f\n\t"			/* exit regardless of success */
		"nop\n\t"			/* avoid delay slot accident */
		"1:\n\t"
		"sw	%0, %2\n\t"		/* save old value */
		"li	%0, 0\n\t"
		"2:\n"
		: "=&r" (ret), "+m" (*p), "=m" (*cmpval)
		: "r" (newval), "r" (*cmpval)
		: "memory");
	return ret;
}
426a9643ea8Slogwang 
/*
 * Generate _acq_/_rel_ wrappers for atomic_cmpset_<WIDTH>: acquire
 * syncs after the operation, release syncs before it.
 */
#define	ATOMIC_CMPSET_ACQ_REL(WIDTH)					\
static __inline  int							\
atomic_cmpset_acq_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t cmpval, uint##WIDTH##_t newval)			\
{									\
	int retval;							\
									\
	retval = atomic_cmpset_##WIDTH(p, cmpval, newval);		\
	mips_sync();							\
	return (retval);						\
}									\
									\
static __inline  int							\
atomic_cmpset_rel_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t cmpval, uint##WIDTH##_t newval)			\
{									\
	mips_sync();							\
	return (atomic_cmpset_##WIDTH(p, cmpval, newval));		\
}
446a9643ea8Slogwang 
/*
 * Generate _acq_/_rel_ wrappers for atomic_fcmpset_<WIDTH>; same
 * barrier placement as ATOMIC_CMPSET_ACQ_REL, with the pointer-to-old
 * cmpval argument of the fcmpset family.
 */
#define	ATOMIC_FCMPSET_ACQ_REL(WIDTH)					\
static __inline  int							\
atomic_fcmpset_acq_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t *cmpval, uint##WIDTH##_t newval)			\
{									\
	int retval;							\
									\
	retval = atomic_fcmpset_##WIDTH(p, cmpval, newval);		\
	mips_sync();							\
	return (retval);						\
}									\
									\
static __inline  int							\
atomic_fcmpset_rel_##WIDTH(__volatile uint##WIDTH##_t *p,		\
    uint##WIDTH##_t *cmpval, uint##WIDTH##_t newval)			\
{									\
	mips_sync();							\
	return (atomic_fcmpset_##WIDTH(p, cmpval, newval));		\
}
466*22ce4affSfengbojiang 
/*
 * Instantiate the acquire/release cmpset and fcmpset variants for the
 * 8-, 16- and 32-bit widths; the return-value contract matches the
 * plain 32-bit implementations above.
 */
ATOMIC_CMPSET_ACQ_REL(8);
ATOMIC_CMPSET_ACQ_REL(16);
ATOMIC_CMPSET_ACQ_REL(32);
ATOMIC_FCMPSET_ACQ_REL(8);
ATOMIC_FCMPSET_ACQ_REL(16);
ATOMIC_FCMPSET_ACQ_REL(32);

478*22ce4affSfengbojiang 
479a9643ea8Slogwang /*
480a9643ea8Slogwang  * Atomically add the value of v to the integer pointed to by p and return
481a9643ea8Slogwang  * the previous value of *p.
482a9643ea8Slogwang  */
/*
 * Atomically add 'v' to '*p' and return the value *p held before the
 * add (ll/sc retry loop; the old value stays in %0 while the sum is
 * built and stored from %2).
 *
 * NOTE(review): unlike the cmpset routines above, this asm declares no
 * "memory" clobber — confirm callers do not rely on it as a compiler
 * barrier.
 */
static __inline uint32_t
atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t value, temp;

	__asm __volatile (
		"1:\tll %0, %1\n\t"		/* load old value */
		"addu %2, %3, %0\n\t"		/* calculate new value */
		"sc %2, %1\n\t"			/* attempt to store */
		"beqz %2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
497a9643ea8Slogwang 
498a9643ea8Slogwang #if defined(__mips_n64) || defined(__mips_n32)
499a9643ea8Slogwang /*
500a9643ea8Slogwang  * Atomically compare the value stored at *p with cmpval and if the
501a9643ea8Slogwang  * two values are equal, update the value of *p with newval. Returns
502a9643ea8Slogwang  * zero if the compare failed, nonzero otherwise.
503a9643ea8Slogwang  */
/*
 * 64-bit (lld/scd) counterpart of atomic_cmpset_32: loops on scd
 * failure, so a zero return means the comparison itself failed.
 */
static __inline int
atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	int ret;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %4\n\t"		/* load old value */
		"bne	%0, %2, 2f\n\t"		/* compare */
		"move	%0, %3\n\t"		/* value to store */
		"scd	%0, %1\n\t"		/* attempt to store */
		"beqz	%0, 1b\n\t"		/* if it failed, spin */
		"j	3f\n\t"
		"2:\n\t"
		"li	%0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return ret;
}
526a9643ea8Slogwang 
/*
 * 64-bit (lld/scd) counterpart of atomic_fcmpset_32, with the same
 * single-attempt contract: returns nonzero iff the store succeeded; on
 * comparison failure the observed value is saved to *cmpval, while an
 * scd failure returns 0 leaving *cmpval untouched.
 */
static __inline int
atomic_fcmpset_64(__volatile uint64_t *p, uint64_t *cmpval, uint64_t newval)
{
        int ret;

        __asm __volatile (
		"lld	%0, %1\n\t"		/* load old value */
                "bne	%0, %4, 1f\n\t"		/* compare */
                "move	%0, %3\n\t"		/* value to store */
                "scd	%0, %1\n\t"		/* attempt to store */
		"j	2f\n\t"			/* exit regardless of success */
		"nop\n\t"			/* avoid delay slot accident */
                "1:\n\t"
                "sd	%0, %2\n\t"		/* save old value */
                "li	%0, 0\n\t"
                "2:\n"
                : "=&r" (ret), "+m" (*p), "=m" (*cmpval)
                : "r" (newval), "r" (*cmpval)
                : "memory");

	return ret;
}
549*22ce4affSfengbojiang 
/*
 * Instantiate the acquire/release variants of the 64-bit cmpset and
 * fcmpset; same return-value contract as the plain versions above.
 */
ATOMIC_CMPSET_ACQ_REL(64);
ATOMIC_FCMPSET_ACQ_REL(64);

557a9643ea8Slogwang 
558a9643ea8Slogwang /*
559a9643ea8Slogwang  * Atomically add the value of v to the integer pointed to by p and return
560a9643ea8Slogwang  * the previous value of *p.
561a9643ea8Slogwang  */
/*
 * 64-bit (lld/scd, daddu) counterpart of atomic_fetchadd_32: add 'v'
 * to '*p' and return the prior value.  Like the 32-bit version, no
 * "memory" clobber is declared here.
 */
static __inline uint64_t
atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t value, temp;

	__asm __volatile (
		"1:\n\t"
		"lld	%0, %1\n\t"		/* load old value */
		"daddu	%2, %3, %0\n\t"		/* calculate new value */
		"scd	%2, %1\n\t"		/* attempt to store */
		"beqz	%2, 1b\n\t"		/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
#endif
577a9643ea8Slogwang #endif
578a9643ea8Slogwang 
/* Acquire thread fence; implemented as a full sync. */
static __inline void
atomic_thread_fence_acq(void)
{

	mips_sync();
}
585a9643ea8Slogwang 
/* Release thread fence; implemented as a full sync. */
static __inline void
atomic_thread_fence_rel(void)
{

	mips_sync();
}
592a9643ea8Slogwang 
/* Acquire+release thread fence; implemented as a full sync. */
static __inline void
atomic_thread_fence_acq_rel(void)
{

	mips_sync();
}
599a9643ea8Slogwang 
/* Sequentially-consistent thread fence; implemented as a full sync. */
static __inline void
atomic_thread_fence_seq_cst(void)
{

	mips_sync();
}
606a9643ea8Slogwang 
607a9643ea8Slogwang /* Operations on chars. */
608a9643ea8Slogwang #define	atomic_set_char		atomic_set_8
609a9643ea8Slogwang #define	atomic_set_acq_char	atomic_set_acq_8
610a9643ea8Slogwang #define	atomic_set_rel_char	atomic_set_rel_8
611a9643ea8Slogwang #define	atomic_clear_char	atomic_clear_8
612a9643ea8Slogwang #define	atomic_clear_acq_char	atomic_clear_acq_8
613a9643ea8Slogwang #define	atomic_clear_rel_char	atomic_clear_rel_8
614a9643ea8Slogwang #define	atomic_add_char		atomic_add_8
615a9643ea8Slogwang #define	atomic_add_acq_char	atomic_add_acq_8
616a9643ea8Slogwang #define	atomic_add_rel_char	atomic_add_rel_8
617a9643ea8Slogwang #define	atomic_subtract_char	atomic_subtract_8
618a9643ea8Slogwang #define	atomic_subtract_acq_char	atomic_subtract_acq_8
619a9643ea8Slogwang #define	atomic_subtract_rel_char	atomic_subtract_rel_8
620*22ce4affSfengbojiang #define	atomic_cmpset_char	atomic_cmpset_8
621*22ce4affSfengbojiang #define	atomic_cmpset_acq_char	atomic_cmpset_acq_8
622*22ce4affSfengbojiang #define	atomic_cmpset_rel_char	atomic_cmpset_rel_8
623*22ce4affSfengbojiang #define	atomic_fcmpset_char	atomic_fcmpset_8
624*22ce4affSfengbojiang #define	atomic_fcmpset_acq_char	atomic_fcmpset_acq_8
625*22ce4affSfengbojiang #define	atomic_fcmpset_rel_char	atomic_fcmpset_rel_8
626a9643ea8Slogwang 
/*
 * Operations on shorts.  Each alias maps the "short" variant onto the
 * corresponding 16-bit primitive (plain, _acq, and _rel orderings).
 */
#define	atomic_set_short	atomic_set_16
#define	atomic_set_acq_short	atomic_set_acq_16
#define	atomic_set_rel_short	atomic_set_rel_16
#define	atomic_clear_short	atomic_clear_16
#define	atomic_clear_acq_short	atomic_clear_acq_16
#define	atomic_clear_rel_short	atomic_clear_rel_16
#define	atomic_add_short	atomic_add_16
#define	atomic_add_acq_short	atomic_add_acq_16
#define	atomic_add_rel_short	atomic_add_rel_16
#define	atomic_subtract_short	atomic_subtract_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16
#define	atomic_cmpset_short	atomic_cmpset_16
#define	atomic_cmpset_acq_short	atomic_cmpset_acq_16
#define	atomic_cmpset_rel_short	atomic_cmpset_rel_16
#define	atomic_fcmpset_short	atomic_fcmpset_16
#define	atomic_fcmpset_acq_short	atomic_fcmpset_acq_16
#define	atomic_fcmpset_rel_short	atomic_fcmpset_rel_16
646a9643ea8Slogwang 
/*
 * Operations on ints.  An int is 32-bit on every supported MIPS ABI, so
 * the "int" variants alias the 32-bit primitives directly.  This group
 * also carries the load/store, readandclear/readandset, and fetchadd
 * forms that the sub-word groups above do not provide.
 */
#define	atomic_set_int		atomic_set_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_add_int		atomic_add_32
#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_subtract_int	atomic_subtract_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_fcmpset_int	atomic_fcmpset_32
#define	atomic_fcmpset_acq_int	atomic_fcmpset_acq_32
#define	atomic_fcmpset_rel_int	atomic_fcmpset_rel_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_store_rel_int	atomic_store_rel_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_readandset_int	atomic_readandset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32
671a9643ea8Slogwang 
/*
 * I think the following is right, even for n32.  For n32 the pointers
 * are still 32-bits, so we need to operate on them as 32-bit quantities,
 * even though they are sign extended in operation.  For longs, there's
 * no question because they are always 32-bits.
 */
#ifdef __mips_n64
/* Operations on longs.  On n64 a long is 64-bit: alias the 64-bit ops. */
#define	atomic_set_long		atomic_set_64
#define	atomic_set_acq_long	atomic_set_acq_64
#define	atomic_set_rel_long	atomic_set_rel_64
#define	atomic_clear_long	atomic_clear_64
#define	atomic_clear_acq_long	atomic_clear_acq_64
#define	atomic_clear_rel_long	atomic_clear_rel_64
#define	atomic_add_long		atomic_add_64
#define	atomic_add_acq_long	atomic_add_acq_64
#define	atomic_add_rel_long	atomic_add_rel_64
#define	atomic_subtract_long	atomic_subtract_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_cmpset_long	atomic_cmpset_64
#define	atomic_cmpset_acq_long	atomic_cmpset_acq_64
#define	atomic_cmpset_rel_long	atomic_cmpset_rel_64
#define	atomic_fcmpset_long	atomic_fcmpset_64
#define	atomic_fcmpset_acq_long	atomic_fcmpset_acq_64
#define	atomic_fcmpset_rel_long	atomic_fcmpset_rel_64
#define	atomic_load_acq_long	atomic_load_acq_64
#define	atomic_store_rel_long	atomic_store_rel_64
#define	atomic_fetchadd_long	atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64

#else /* !__mips_n64 */

/*
 * Operations on longs.  On o32/n32 a long is 32-bit, so forward to the
 * 32-bit primitives, casting the pointer through u_int.
 */
#define	atomic_set_long(p, v)						\
	atomic_set_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_long(p, v)					\
	atomic_set_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_long(p, v)					\
	atomic_set_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_long(p, v)						\
	atomic_clear_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_long(p, v)					\
	atomic_clear_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_long(p, v)					\
	atomic_clear_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_long(p, v)						\
	atomic_add_32((volatile u_int *)(p), (u_int)(v))
/*
 * BUGFIX: the _acq/_rel add variants previously expanded to the plain
 * atomic_add_32, silently dropping the acquire/release ordering their
 * names promise (compare the subtract variants below, which delegate
 * correctly).  They now forward to the ordered 32-bit primitives.
 */
#define	atomic_add_acq_long(p, v)					\
	atomic_add_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_long(p, v)					\
	atomic_add_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_long(p, v)					\
	atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_long(p, v)					\
	atomic_subtract_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_long(p, v)					\
	atomic_subtract_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_cmpset_long(p, cmpval, newval)				\
	atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_acq_long(p, cmpval, newval)			\
	atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_rel_long(p, cmpval, newval)			\
	atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
/* fcmpset takes cmpval by pointer (it is updated on failure). */
#define	atomic_fcmpset_long(p, cmpval, newval)				\
	atomic_fcmpset_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_acq_long(p, cmpval, newval)			\
	atomic_fcmpset_acq_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_fcmpset_rel_long(p, cmpval, newval)			\
	atomic_fcmpset_rel_32((volatile u_int *)(p), (u_int *)(cmpval),	\
	    (u_int)(newval))
#define	atomic_load_acq_long(p)						\
	(u_long)atomic_load_acq_32((volatile u_int *)(p))
#define	atomic_store_rel_long(p, v)					\
	atomic_store_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_fetchadd_long(p, v)					\
	atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_long(p)					\
	atomic_readandclear_32((volatile u_int *)(p))

#endif /* __mips_n64 */
758a9643ea8Slogwang 
/*
 * Operations on pointers.  Per the long-vs-n32 comment above, a pointer
 * is the same width as a long on every ABI handled here, so the pointer
 * variants simply alias the long variants defined above.
 */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_fcmpset_ptr	atomic_fcmpset_long
#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long
781a9643ea8Slogwang 
/*
 * Atomically replace *ptr with value and return the contents *ptr held
 * immediately before the swap.  Built as a fcmpset loop: the initial
 * plain read is only a starting guess, since a failing fcmpset reloads
 * the current contents into "prev" before the next attempt.
 */
static __inline unsigned int
atomic_swap_int(volatile unsigned int *ptr, const unsigned int value)
{
	unsigned int prev;

	prev = *ptr;
	for (;;) {
		if (atomic_fcmpset_int(ptr, &prev, value))
			break;
	}
	return (prev);
}
793*22ce4affSfengbojiang 
/*
 * 32-bit atomic swap: exchange *ptr for value, returning the prior
 * contents.  Loops on fcmpset, which refreshes "prev" with the live
 * value each time the exchange loses a race.
 */
static __inline uint32_t
atomic_swap_32(volatile uint32_t *ptr, const uint32_t value)
{
	uint32_t prev;

	prev = *ptr;
	for (;;) {
		if (atomic_fcmpset_32(ptr, &prev, value))
			break;
	}
	return (prev);
}
805*22ce4affSfengbojiang 
#if defined(__mips_n64) || defined(__mips_n32)
/*
 * 64-bit atomic swap, available only where the 64-bit fcmpset primitive
 * exists (n32/n64 ABIs).  Same fcmpset-retry shape as the 32-bit swap.
 */
static __inline uint64_t
atomic_swap_64(volatile uint64_t *ptr, const uint64_t value)
{
	uint64_t prev;

	prev = *ptr;
	for (;;) {
		if (atomic_fcmpset_64(ptr, &prev, value))
			break;
	}
	return (prev);
}
#endif
819*22ce4affSfengbojiang 
#ifdef __mips_n64
/*
 * Atomic swap for longs.  A long is 64-bit on n64 and 32-bit elsewhere,
 * so pick the fcmpset loop of matching width; the casts only re-label
 * identically sized types for the fixed-width primitives.
 */
static __inline unsigned long
atomic_swap_long(volatile unsigned long *ptr, const unsigned long value)
{
	unsigned long prev;

	prev = *ptr;
	for (;;) {
		if (atomic_fcmpset_64((volatile uint64_t *)ptr,
		    (uint64_t *)&prev, value))
			break;
	}
	return (prev);
}
#else
/* 32-bit-long flavor (o32/n32): identical loop over the 32-bit fcmpset. */
static __inline unsigned long
atomic_swap_long(volatile unsigned long *ptr, const unsigned long value)
{
	unsigned long prev;

	prev = *ptr;
	for (;;) {
		if (atomic_fcmpset_32((volatile uint32_t *)ptr,
		    (uint32_t *)&prev, value))
			break;
	}
	return (prev);
}
#endif
/*
 * Pointer swap forwards to the long swap (same width on all ABIs here).
 * Both arguments are now parenthesized and explicitly cast, matching
 * the other macros in this file: previously "value" was expanded bare,
 * which broke for comma expressions and relied on an implicit
 * pointer-to-integer conversion for pointer-typed arguments; the ptr
 * cast also no longer strips a caller's volatile qualifier.
 */
#define	atomic_swap_ptr(ptr, value)					\
	atomic_swap_long((volatile unsigned long *)(ptr),		\
	    (unsigned long)(value))
848*22ce4affSfengbojiang 
849*22ce4affSfengbojiang #include <sys/_atomic_subword.h>
850*22ce4affSfengbojiang 
851a9643ea8Slogwang #endif /* ! _MACHINE_ATOMIC_H_ */
852