1/*
2 * CDDL HEADER START
3 *
4 * The contents of this file are subject to the terms of the
5 * Common Development and Distribution License (the "License").
6 * You may not use this file except in compliance with the License.
7 *
8 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9 * or http://www.opensolaris.org/os/licensing.
10 * See the License for the specific language governing permissions
11 * and limitations under the License.
12 *
13 * When distributing Covered Code, include this CDDL HEADER in each
14 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15 * If applicable, add the following below this CDDL HEADER, with the
16 * fields enclosed by brackets "[]" replaced with your own identifying
17 * information: Portions Copyright [yyyy] [name of copyright owner]
18 *
19 * CDDL HEADER END
20 */
21/*
22 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
23 * Use is subject to license terms.
24 */
25
26	.ident	"%Z%%M%	%I%	%E% SMI"
27
28	.file	"%M%"
29
30#define _ASM
31#ifdef __linux__
32#include <ia32/sys/asm_linkage.h>
33#elif __FreeBSD__
34#include <machine/asmacros.h>
35#define SET_SIZE(x)
36#endif
37	ENTRY(atomic_inc_8)
38	ALTENTRY(atomic_inc_uchar)
39	movl	4(%esp), %eax
40	lock
41	incb	(%eax)
42	ret
43	SET_SIZE(atomic_inc_uchar)
44	SET_SIZE(atomic_inc_8)
45
46	ENTRY(atomic_inc_16)
47	ALTENTRY(atomic_inc_ushort)
48	movl	4(%esp), %eax
49	lock
50	incw	(%eax)
51	ret
52	SET_SIZE(atomic_inc_ushort)
53	SET_SIZE(atomic_inc_16)
54
55	ENTRY(atomic_inc_32)
56	ALTENTRY(atomic_inc_uint)
57	ALTENTRY(atomic_inc_ulong)
58	movl	4(%esp), %eax
59	lock
60	incl	(%eax)
61	ret
62	SET_SIZE(atomic_inc_ulong)
63	SET_SIZE(atomic_inc_uint)
64	SET_SIZE(atomic_inc_32)
65
	/*
	 * uint8_t atomic_inc_8_nv(volatile uint8_t *target)
	 * Atomically increments *target; returns the new value.
	 */
	ENTRY(atomic_inc_8_nv)
	ALTENTRY(atomic_inc_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = observed old value */
1:
	leal	1(%eax), %ecx		/* %cl = old + 1 (new value) */
	lock
	cmpxchgb %cl, (%edx)		/* install %cl iff *target still == %al */
	jne	1b			/* raced: cmpxchgb reloaded %al; retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_inc_uchar_nv)
	SET_SIZE(atomic_inc_8_nv)
79
	/*
	 * uint16_t atomic_inc_16_nv(volatile uint16_t *target)
	 * Atomically increments *target; returns the new value.
	 */
	ENTRY(atomic_inc_16_nv)
	ALTENTRY(atomic_inc_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = observed old value */
1:
	leal	1(%eax), %ecx		/* %cx = old + 1 (new value) */
	lock
	cmpxchgw %cx, (%edx)		/* install %cx iff *target still == %ax */
	jne	1b			/* raced: cmpxchgw reloaded %ax; retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_inc_ushort_nv)
	SET_SIZE(atomic_inc_16_nv)
93
	/*
	 * uint32_t atomic_inc_32_nv(volatile uint32_t *target)
	 * Atomically increments *target; returns the new value.
	 */
	ENTRY(atomic_inc_32_nv)
	ALTENTRY(atomic_inc_uint_nv)
	ALTENTRY(atomic_inc_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = observed old value */
1:
	leal	1(%eax), %ecx		/* %ecx = old + 1 (new value) */
	lock
	cmpxchgl %ecx, (%edx)		/* install iff *target still == %eax */
	jne	1b			/* raced: cmpxchgl reloaded %eax; retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_inc_ulong_nv)
	SET_SIZE(atomic_inc_uint_nv)
	SET_SIZE(atomic_inc_32_nv)
109
110	/*
111	 * NOTE: If atomic_inc_64 and atomic_inc_64_nv are ever
112	 * separated, you need to also edit the libc i386 platform
113	 * specific mapfile and remove the NODYNSORT attribute
114	 * from atomic_inc_64_nv.
115	 */
	/*
	 * uint64_t atomic_inc_64_nv(volatile uint64_t *target)
	 * Atomically increments the 64-bit value; returns the new value
	 * in %edx:%eax.  Implemented with a cmpxchg8b retry loop.
	 */
	ENTRY(atomic_inc_64)
	ALTENTRY(atomic_inc_64_nv)
	pushl	%edi			/* callee-saved */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %edi		/* %edi = target (arg moved by 8 pushed bytes) */
	movl	(%edi), %eax		/* %edx:%eax = observed old value */
	movl	4(%edi), %edx
1:
	xorl	%ebx, %ebx		/* build increment %ecx:%ebx = 1 */
	xorl	%ecx, %ecx
	incl	%ebx
	addl	%eax, %ebx		/* %ecx:%ebx = old + 1 ... */
	adcl	%edx, %ecx		/* ... with carry into high word */
	lock
	cmpxchg8b (%edi)		/* install iff *target still == %edx:%eax */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_inc_64_nv)
	SET_SIZE(atomic_inc_64)
139
140	ENTRY(atomic_dec_8)
141	ALTENTRY(atomic_dec_uchar)
142	movl	4(%esp), %eax
143	lock
144	decb	(%eax)
145	ret
146	SET_SIZE(atomic_dec_uchar)
147	SET_SIZE(atomic_dec_8)
148
149	ENTRY(atomic_dec_16)
150	ALTENTRY(atomic_dec_ushort)
151	movl	4(%esp), %eax
152	lock
153	decw	(%eax)
154	ret
155	SET_SIZE(atomic_dec_ushort)
156	SET_SIZE(atomic_dec_16)
157
158	ENTRY(atomic_dec_32)
159	ALTENTRY(atomic_dec_uint)
160	ALTENTRY(atomic_dec_ulong)
161	movl	4(%esp), %eax
162	lock
163	decl	(%eax)
164	ret
165	SET_SIZE(atomic_dec_ulong)
166	SET_SIZE(atomic_dec_uint)
167	SET_SIZE(atomic_dec_32)
168
	/*
	 * uint8_t atomic_dec_8_nv(volatile uint8_t *target)
	 * Atomically decrements *target; returns the new value.
	 */
	ENTRY(atomic_dec_8_nv)
	ALTENTRY(atomic_dec_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = observed old value */
1:
	leal	-1(%eax), %ecx		/* %cl = old - 1 (new value) */
	lock
	cmpxchgb %cl, (%edx)		/* install %cl iff *target still == %al */
	jne	1b			/* raced: cmpxchgb reloaded %al; retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_dec_uchar_nv)
	SET_SIZE(atomic_dec_8_nv)
182
	/*
	 * uint16_t atomic_dec_16_nv(volatile uint16_t *target)
	 * Atomically decrements *target; returns the new value.
	 */
	ENTRY(atomic_dec_16_nv)
	ALTENTRY(atomic_dec_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = observed old value */
1:
	leal	-1(%eax), %ecx		/* %cx = old - 1 (new value) */
	lock
	cmpxchgw %cx, (%edx)		/* install %cx iff *target still == %ax */
	jne	1b			/* raced: cmpxchgw reloaded %ax; retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_dec_ushort_nv)
	SET_SIZE(atomic_dec_16_nv)
196
	/*
	 * uint32_t atomic_dec_32_nv(volatile uint32_t *target)
	 * Atomically decrements *target; returns the new value.
	 */
	ENTRY(atomic_dec_32_nv)
	ALTENTRY(atomic_dec_uint_nv)
	ALTENTRY(atomic_dec_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = observed old value */
1:
	leal	-1(%eax), %ecx		/* %ecx = old - 1 (new value) */
	lock
	cmpxchgl %ecx, (%edx)		/* install iff *target still == %eax */
	jne	1b			/* raced: cmpxchgl reloaded %eax; retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_dec_ulong_nv)
	SET_SIZE(atomic_dec_uint_nv)
	SET_SIZE(atomic_dec_32_nv)
212
213	/*
214	 * NOTE: If atomic_dec_64 and atomic_dec_64_nv are ever
215	 * separated, it is important to edit the libc i386 platform
216	 * specific mapfile and remove the NODYNSORT attribute
217	 * from atomic_dec_64_nv.
218	 */
	/*
	 * uint64_t atomic_dec_64_nv(volatile uint64_t *target)
	 * Atomically decrements the 64-bit value; returns the new value
	 * in %edx:%eax.  cmpxchg8b retry loop.
	 */
	ENTRY(atomic_dec_64)
	ALTENTRY(atomic_dec_64_nv)
	pushl	%edi			/* callee-saved */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %edi		/* %edi = target (arg moved by 8 pushed bytes) */
	movl	(%edi), %eax		/* %edx:%eax = observed old value */
	movl	4(%edi), %edx
1:
	xorl	%ebx, %ebx		/* build %ecx:%ebx = -1 (the decrement): */
	xorl	%ecx, %ecx		/* zero both halves ... */
	not	%ecx			/* ... then complement to all-ones */
	not	%ebx
	addl	%eax, %ebx		/* %ecx:%ebx = old + (-1) = old - 1 ... */
	adcl	%edx, %ecx		/* ... with carry into high word */
	lock
	cmpxchg8b (%edi)		/* install iff *target still == %edx:%eax */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_dec_64_nv)
	SET_SIZE(atomic_dec_64)
243
244	ENTRY(atomic_add_8)
245	ALTENTRY(atomic_add_char)
246	movl	4(%esp), %eax
247	movl	8(%esp), %ecx
248	lock
249	addb	%cl, (%eax)
250	ret
251	SET_SIZE(atomic_add_char)
252	SET_SIZE(atomic_add_8)
253
254	ENTRY(atomic_add_16)
255	ALTENTRY(atomic_add_short)
256	movl	4(%esp), %eax
257	movl	8(%esp), %ecx
258	lock
259	addw	%cx, (%eax)
260	ret
261	SET_SIZE(atomic_add_short)
262	SET_SIZE(atomic_add_16)
263
264	ENTRY(atomic_add_32)
265	ALTENTRY(atomic_add_int)
266	ALTENTRY(atomic_add_ptr)
267	ALTENTRY(atomic_add_long)
268	movl	4(%esp), %eax
269	movl	8(%esp), %ecx
270	lock
271	addl	%ecx, (%eax)
272	ret
273	SET_SIZE(atomic_add_long)
274	SET_SIZE(atomic_add_ptr)
275	SET_SIZE(atomic_add_int)
276	SET_SIZE(atomic_add_32)
277
278	ENTRY(atomic_sub_8)
279	ALTENTRY(atomic_sub_char)
280	movl	4(%esp), %eax
281	movl	8(%esp), %ecx
282	lock
283	subb	%cl, (%eax)
284	ret
285	SET_SIZE(atomic_sub_char)
286	SET_SIZE(atomic_sub_8)
287
288	ENTRY(atomic_sub_16)
289	ALTENTRY(atomic_sub_short)
290	movl	4(%esp), %eax
291	movl	8(%esp), %ecx
292	lock
293	subw	%cx, (%eax)
294	ret
295	SET_SIZE(atomic_sub_short)
296	SET_SIZE(atomic_sub_16)
297
298	ENTRY(atomic_sub_32)
299	ALTENTRY(atomic_sub_int)
300	ALTENTRY(atomic_sub_ptr)
301	ALTENTRY(atomic_sub_long)
302	movl	4(%esp), %eax
303	movl	8(%esp), %ecx
304	lock
305	subl	%ecx, (%eax)
306	ret
307	SET_SIZE(atomic_sub_long)
308	SET_SIZE(atomic_sub_ptr)
309	SET_SIZE(atomic_sub_int)
310	SET_SIZE(atomic_sub_32)
311
312	ENTRY(atomic_or_8)
313	ALTENTRY(atomic_or_uchar)
314	movl	4(%esp), %eax
315	movb	8(%esp), %cl
316	lock
317	orb	%cl, (%eax)
318	ret
319	SET_SIZE(atomic_or_uchar)
320	SET_SIZE(atomic_or_8)
321
322	ENTRY(atomic_or_16)
323	ALTENTRY(atomic_or_ushort)
324	movl	4(%esp), %eax
325	movw	8(%esp), %cx
326	lock
327	orw	%cx, (%eax)
328	ret
329	SET_SIZE(atomic_or_ushort)
330	SET_SIZE(atomic_or_16)
331
332	ENTRY(atomic_or_32)
333	ALTENTRY(atomic_or_uint)
334	ALTENTRY(atomic_or_ulong)
335	movl	4(%esp), %eax
336	movl	8(%esp), %ecx
337	lock
338	orl	%ecx, (%eax)
339	ret
340	SET_SIZE(atomic_or_ulong)
341	SET_SIZE(atomic_or_uint)
342	SET_SIZE(atomic_or_32)
343
344	ENTRY(atomic_and_8)
345	ALTENTRY(atomic_and_uchar)
346	movl	4(%esp), %eax
347	movb	8(%esp), %cl
348	lock
349	andb	%cl, (%eax)
350	ret
351	SET_SIZE(atomic_and_uchar)
352	SET_SIZE(atomic_and_8)
353
354	ENTRY(atomic_and_16)
355	ALTENTRY(atomic_and_ushort)
356	movl	4(%esp), %eax
357	movw	8(%esp), %cx
358	lock
359	andw	%cx, (%eax)
360	ret
361	SET_SIZE(atomic_and_ushort)
362	SET_SIZE(atomic_and_16)
363
364	ENTRY(atomic_and_32)
365	ALTENTRY(atomic_and_uint)
366	ALTENTRY(atomic_and_ulong)
367	movl	4(%esp), %eax
368	movl	8(%esp), %ecx
369	lock
370	andl	%ecx, (%eax)
371	ret
372	SET_SIZE(atomic_and_ulong)
373	SET_SIZE(atomic_and_uint)
374	SET_SIZE(atomic_and_32)
375
	/*
	 * uint8_t atomic_add_8_nv(volatile uint8_t *target, int8_t delta)
	 * Atomically adds delta to *target; returns the new value.
	 * (add is commutative, so delta + old == old + delta.)
	 */
	ENTRY(atomic_add_8_nv)
	ALTENTRY(atomic_add_char_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = delta */
	addb	%al, %cl		/* %cl = delta + old (new value) */
	lock
	cmpxchgb %cl, (%edx)		/* install %cl iff *target still == %al */
	jne	1b			/* raced: cmpxchgb reloaded %al; retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_add_char_nv)
	SET_SIZE(atomic_add_8_nv)
390
	/*
	 * uint16_t atomic_add_16_nv(volatile uint16_t *target, int16_t delta)
	 * Atomically adds delta to *target; returns the new value.
	 */
	ENTRY(atomic_add_16_nv)
	ALTENTRY(atomic_add_short_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = delta */
	addw	%ax, %cx		/* %cx = delta + old (new value) */
	lock
	cmpxchgw %cx, (%edx)		/* install %cx iff *target still == %ax */
	jne	1b			/* raced: cmpxchgw reloaded %ax; retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_add_short_nv)
	SET_SIZE(atomic_add_16_nv)
405
	/*
	 * uint32_t atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
	 * Atomically adds delta to *target; returns the new value.
	 */
	ENTRY(atomic_add_32_nv)
	ALTENTRY(atomic_add_int_nv)
	ALTENTRY(atomic_add_ptr_nv)
	ALTENTRY(atomic_add_long_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = delta */
	addl	%eax, %ecx		/* %ecx = delta + old (new value) */
	lock
	cmpxchgl %ecx, (%edx)		/* install iff *target still == %eax */
	jne	1b			/* raced: cmpxchgl reloaded %eax; retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_add_long_nv)
	SET_SIZE(atomic_add_ptr_nv)
	SET_SIZE(atomic_add_int_nv)
	SET_SIZE(atomic_add_32_nv)
424
425	ENTRY(atomic_sub_8_nv)
426	ALTENTRY(atomic_sub_char_nv)
427	movl	4(%esp), %edx
428	movb	(%edx), %al
4291:
430	movl	8(%esp), %ecx
431	subb	%al, %cl
432	lock
433	cmpxchgb %cl, (%edx)
434	jne	1b
435	movzbl	%cl, %eax
436	ret
437	SET_SIZE(atomic_sub_char_nv)
438	SET_SIZE(atomic_sub_8_nv)
439
440	ENTRY(atomic_sub_16_nv)
441	ALTENTRY(atomic_sub_short_nv)
442	movl	4(%esp), %edx
443	movw	(%edx), %ax
4441:
445	movl	8(%esp), %ecx
446	subw	%ax, %cx
447	lock
448	cmpxchgw %cx, (%edx)
449	jne	1b
450	movzwl	%cx, %eax
451	ret
452	SET_SIZE(atomic_sub_short_nv)
453	SET_SIZE(atomic_sub_16_nv)
454
455	ENTRY(atomic_sub_32_nv)
456	ALTENTRY(atomic_sub_int_nv)
457	ALTENTRY(atomic_sub_ptr_nv)
458	ALTENTRY(atomic_sub_long_nv)
459	movl	4(%esp), %edx
460	movl	(%edx), %eax
4611:
462	movl	8(%esp), %ecx
463	subl	%eax, %ecx
464	lock
465	cmpxchgl %ecx, (%edx)
466	jne	1b
467	movl	%ecx, %eax
468	ret
469	SET_SIZE(atomic_sub_long_nv)
470	SET_SIZE(atomic_sub_ptr_nv)
471	SET_SIZE(atomic_sub_int_nv)
472	SET_SIZE(atomic_sub_32_nv)
473
474	/*
475	 * NOTE: If atomic_add_64 and atomic_add_64_nv are ever
476	 * separated, it is important to edit the libc i386 platform
477	 * specific mapfile and remove the NODYNSORT attribute
478	 * from atomic_add_64_nv.
479	 */
	/*
	 * uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
	 * Atomically adds delta to the 64-bit value; returns the new value
	 * in %edx:%eax.  cmpxchg8b retry loop.
	 */
	ENTRY(atomic_add_64)
	ALTENTRY(atomic_add_64_nv)
	pushl	%edi			/* callee-saved */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %edi		/* %edi = target (args moved by 8 pushed bytes) */
	movl	(%edi), %eax		/* %edx:%eax = observed old value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = delta */
	movl	20(%esp), %ecx
	addl	%eax, %ebx		/* %ecx:%ebx = delta + old ... */
	adcl	%edx, %ecx		/* ... with carry into high word */
	lock
	cmpxchg8b (%edi)		/* install iff *target still == %edx:%eax */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_add_64_nv)
	SET_SIZE(atomic_add_64)
502
503	ENTRY(atomic_sub_64)
504	ALTENTRY(atomic_sub_64_nv)
505	pushl	%edi
506	pushl	%ebx
507	movl	12(%esp), %edi
508	movl	(%edi), %eax
509	movl	4(%edi), %edx
5101:
511	movl	16(%esp), %ebx
512	movl	20(%esp), %ecx
513	subl	%eax, %ebx
514	sbbl	%edx, %ecx
515	lock
516	cmpxchg8b (%edi)
517	jne	1b
518	movl	%ebx, %eax
519	movl	%ecx, %edx
520	popl	%ebx
521	popl	%edi
522	ret
523	SET_SIZE(atomic_sub_64_nv)
524	SET_SIZE(atomic_sub_64)
525
	/*
	 * uint8_t atomic_or_8_nv(volatile uint8_t *target, uint8_t bits)
	 * Atomically ORs bits into *target; returns the new value.
	 */
	ENTRY(atomic_or_8_nv)
	ALTENTRY(atomic_or_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	orb	%al, %cl		/* %cl = old | bits (new value) */
	lock
	cmpxchgb %cl, (%edx)		/* install %cl iff *target still == %al */
	jne	1b			/* raced: cmpxchgb reloaded %al; retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_or_uchar_nv)
	SET_SIZE(atomic_or_8_nv)
540
	/*
	 * uint16_t atomic_or_16_nv(volatile uint16_t *target, uint16_t bits)
	 * Atomically ORs bits into *target; returns the new value.
	 */
	ENTRY(atomic_or_16_nv)
	ALTENTRY(atomic_or_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	orw	%ax, %cx		/* %cx = old | bits (new value) */
	lock
	cmpxchgw %cx, (%edx)		/* install %cx iff *target still == %ax */
	jne	1b			/* raced: cmpxchgw reloaded %ax; retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_or_ushort_nv)
	SET_SIZE(atomic_or_16_nv)
555
	/*
	 * uint32_t atomic_or_32_nv(volatile uint32_t *target, uint32_t bits)
	 * Atomically ORs bits into *target; returns the new value.
	 */
	ENTRY(atomic_or_32_nv)
	ALTENTRY(atomic_or_uint_nv)
	ALTENTRY(atomic_or_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	orl	%eax, %ecx		/* %ecx = old | bits (new value) */
	lock
	cmpxchgl %ecx, (%edx)		/* install iff *target still == %eax */
	jne	1b			/* raced: cmpxchgl reloaded %eax; retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_or_ulong_nv)
	SET_SIZE(atomic_or_uint_nv)
	SET_SIZE(atomic_or_32_nv)
572
573	/*
574	 * NOTE: If atomic_or_64 and atomic_or_64_nv are ever
575	 * separated, it is important to edit the libc i386 platform
576	 * specific mapfile and remove the NODYNSORT attribute
577	 * from atomic_or_64_nv.
578	 */
	/*
	 * uint64_t atomic_or_64_nv(volatile uint64_t *target, uint64_t bits)
	 * Atomically ORs bits into the 64-bit value; returns the new value
	 * in %edx:%eax.  cmpxchg8b retry loop.
	 */
	ENTRY(atomic_or_64)
	ALTENTRY(atomic_or_64_nv)
	pushl	%edi			/* callee-saved */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %edi		/* %edi = target (args moved by 8 pushed bytes) */
	movl	(%edi), %eax		/* %edx:%eax = observed old value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = bits */
	movl	20(%esp), %ecx
	orl	%eax, %ebx		/* %ecx:%ebx = old | bits (new value) */
	orl	%edx, %ecx
	lock
	cmpxchg8b (%edi)		/* install iff *target still == %edx:%eax */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_or_64_nv)
	SET_SIZE(atomic_or_64)
601
	/*
	 * uint8_t atomic_and_8_nv(volatile uint8_t *target, uint8_t bits)
	 * Atomically ANDs bits into *target; returns the new value.
	 */
	ENTRY(atomic_and_8_nv)
	ALTENTRY(atomic_and_uchar_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movb	(%edx), %al		/* %al = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	andb	%al, %cl		/* %cl = old & bits (new value) */
	lock
	cmpxchgb %cl, (%edx)		/* install %cl iff *target still == %al */
	jne	1b			/* raced: cmpxchgb reloaded %al; retry */
	movzbl	%cl, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_and_uchar_nv)
	SET_SIZE(atomic_and_8_nv)
616
	/*
	 * uint16_t atomic_and_16_nv(volatile uint16_t *target, uint16_t bits)
	 * Atomically ANDs bits into *target; returns the new value.
	 */
	ENTRY(atomic_and_16_nv)
	ALTENTRY(atomic_and_ushort_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movw	(%edx), %ax		/* %ax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	andw	%ax, %cx		/* %cx = old & bits (new value) */
	lock
	cmpxchgw %cx, (%edx)		/* install %cx iff *target still == %ax */
	jne	1b			/* raced: cmpxchgw reloaded %ax; retry */
	movzwl	%cx, %eax		/* return new value, zero-extended */
	ret
	SET_SIZE(atomic_and_ushort_nv)
	SET_SIZE(atomic_and_16_nv)
631
	/*
	 * uint32_t atomic_and_32_nv(volatile uint32_t *target, uint32_t bits)
	 * Atomically ANDs bits into *target; returns the new value.
	 */
	ENTRY(atomic_and_32_nv)
	ALTENTRY(atomic_and_uint_nv)
	ALTENTRY(atomic_and_ulong_nv)
	movl	4(%esp), %edx		/* %edx = target address */
	movl	(%edx), %eax		/* %eax = observed old value */
1:
	movl	8(%esp), %ecx		/* %ecx = bits */
	andl	%eax, %ecx		/* %ecx = old & bits (new value) */
	lock
	cmpxchgl %ecx, (%edx)		/* install iff *target still == %eax */
	jne	1b			/* raced: cmpxchgl reloaded %eax; retry */
	movl	%ecx, %eax		/* return new value */
	ret
	SET_SIZE(atomic_and_ulong_nv)
	SET_SIZE(atomic_and_uint_nv)
	SET_SIZE(atomic_and_32_nv)
648
649	/*
650	 * NOTE: If atomic_and_64 and atomic_and_64_nv are ever
651	 * separated, it is important to edit the libc i386 platform
652	 * specific mapfile and remove the NODYNSORT attribute
653	 * from atomic_and_64_nv.
654	 */
	/*
	 * uint64_t atomic_and_64_nv(volatile uint64_t *target, uint64_t bits)
	 * Atomically ANDs bits into the 64-bit value; returns the new value
	 * in %edx:%eax.  cmpxchg8b retry loop.
	 */
	ENTRY(atomic_and_64)
	ALTENTRY(atomic_and_64_nv)
	pushl	%edi			/* callee-saved */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %edi		/* %edi = target (args moved by 8 pushed bytes) */
	movl	(%edi), %eax		/* %edx:%eax = observed old value */
	movl	4(%edi), %edx
1:
	movl	16(%esp), %ebx		/* %ecx:%ebx = bits */
	movl	20(%esp), %ecx
	andl	%eax, %ebx		/* %ecx:%ebx = old & bits (new value) */
	andl	%edx, %ecx
	lock
	cmpxchg8b (%edi)		/* install iff *target still == %edx:%eax */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	movl	%ebx, %eax		/* return new value in %edx:%eax */
	movl	%ecx, %edx
	popl	%ebx
	popl	%edi
	ret
	SET_SIZE(atomic_and_64_nv)
	SET_SIZE(atomic_and_64)
677
678	ENTRY(atomic_cas_8)
679	ALTENTRY(atomic_cas_uchar)
680	movl	4(%esp), %edx
681	movzbl	8(%esp), %eax
682	movb	12(%esp), %cl
683	lock
684	cmpxchgb %cl, (%edx)
685	ret
686	SET_SIZE(atomic_cas_uchar)
687	SET_SIZE(atomic_cas_8)
688
689	ENTRY(atomic_cas_16)
690	ALTENTRY(atomic_cas_ushort)
691	movl	4(%esp), %edx
692	movzwl	8(%esp), %eax
693	movw	12(%esp), %cx
694	lock
695	cmpxchgw %cx, (%edx)
696	ret
697	SET_SIZE(atomic_cas_ushort)
698	SET_SIZE(atomic_cas_16)
699
700	ENTRY(atomic_cas_32)
701	ALTENTRY(atomic_cas_uint)
702	ALTENTRY(atomic_cas_ulong)
703	ALTENTRY(atomic_cas_ptr)
704	movl	4(%esp), %edx
705	movl	8(%esp), %eax
706	movl	12(%esp), %ecx
707	lock
708	cmpxchgl %ecx, (%edx)
709	ret
710	SET_SIZE(atomic_cas_ptr)
711	SET_SIZE(atomic_cas_ulong)
712	SET_SIZE(atomic_cas_uint)
713	SET_SIZE(atomic_cas_32)
714
	/*
	 * uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t new)
	 * If *target == cmp, store new.  Returns the old value in %edx:%eax.
	 * cmpxchg8b pins the comparand to %edx:%eax and the new value to
	 * %ecx:%ebx; only the address register is a free choice.
	 */
	ENTRY(atomic_cas_64)
	pushl	%ebx			/* callee-saved; needed for new value */
	pushl	%esi			/* callee-saved; holds target */
	movl	12(%esp), %esi		/* %esi = target (args moved by 8 pushed bytes) */
	movl	16(%esp), %eax		/* %edx:%eax = cmp */
	movl	20(%esp), %edx
	movl	24(%esp), %ebx		/* %ecx:%ebx = new value */
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%esi)		/* old value left in %edx:%eax either way */
	popl	%esi
	popl	%ebx
	ret
	SET_SIZE(atomic_cas_64)
729
730	ENTRY(atomic_swap_8)
731	ALTENTRY(atomic_swap_uchar)
732	movl	4(%esp), %edx
733	movzbl	8(%esp), %eax
734	lock
735	xchgb	%al, (%edx)
736	ret
737	SET_SIZE(atomic_swap_uchar)
738	SET_SIZE(atomic_swap_8)
739
740	ENTRY(atomic_swap_16)
741	ALTENTRY(atomic_swap_ushort)
742	movl	4(%esp), %edx
743	movzwl	8(%esp), %eax
744	lock
745	xchgw	%ax, (%edx)
746	ret
747	SET_SIZE(atomic_swap_ushort)
748	SET_SIZE(atomic_swap_16)
749
750	ENTRY(atomic_swap_32)
751	ALTENTRY(atomic_swap_uint)
752	ALTENTRY(atomic_swap_ptr)
753	ALTENTRY(atomic_swap_ulong)
754	movl	4(%esp), %edx
755	movl	8(%esp), %eax
756	lock
757	xchgl	%eax, (%edx)
758	ret
759	SET_SIZE(atomic_swap_ulong)
760	SET_SIZE(atomic_swap_ptr)
761	SET_SIZE(atomic_swap_uint)
762	SET_SIZE(atomic_swap_32)
763
	/*
	 * uint64_t atomic_swap_64(volatile uint64_t *target, uint64_t new)
	 * Atomically stores new into the 64-bit value; returns the old
	 * value in %edx:%eax.  No 64-bit xchg on i386, so a cmpxchg8b loop.
	 */
	ENTRY(atomic_swap_64)
	pushl	%esi			/* callee-saved; holds target */
	pushl	%ebx			/* callee-saved; cmpxchg8b uses %ebx */
	movl	12(%esp), %esi		/* %esi = target (args moved by 8 pushed bytes) */
	movl	16(%esp), %ebx		/* %ecx:%ebx = new value */
	movl	20(%esp), %ecx
	movl	(%esi), %eax		/* %edx:%eax = current value (racy read; */
	movl	4(%esi), %edx		/* validated by cmpxchg8b below) */
1:
	lock
	cmpxchg8b (%esi)		/* swap in %ecx:%ebx iff value unchanged */
	jne	1b			/* raced: %edx:%eax reloaded; retry */
	popl	%ebx			/* old value returned in %edx:%eax */
	popl	%esi
	ret
	SET_SIZE(atomic_swap_64)
780
781	ENTRY(atomic_set_long_excl)
782	movl	4(%esp), %edx
783	movl	8(%esp), %ecx
784	xorl	%eax, %eax
785	lock
786	btsl	%ecx, (%edx)
787	jnc	1f
788	decl	%eax
7891:
790	ret
791	SET_SIZE(atomic_set_long_excl)
792
793	ENTRY(atomic_clear_long_excl)
794	movl	4(%esp), %edx
795	movl	8(%esp), %ecx
796	xorl	%eax, %eax
797	lock
798	btrl	%ecx, (%edx)
799	jc	1f
800	decl	%eax
8011:
802	ret
803	SET_SIZE(atomic_clear_long_excl)
804
805	/*
806	 * NOTE: membar_enter, membar_exit, membar_producer, and
807	 * membar_consumer are all identical routines. We define them
808	 * separately, instead of using ALTENTRY definitions to alias them
809	 * together, so that DTrace and debuggers will see a unique address
810	 * for them, allowing more accurate tracing.
811	*/
812
813
	/*
	 * A locked read-modify-write of a stack word acts as a full memory
	 * barrier on x86; "xorl $0" leaves the word unchanged.
	 */
	ENTRY(membar_enter)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_enter)
819
	/*
	 * Full memory barrier via a locked no-op RMW on the stack word.
	 */
	ENTRY(membar_exit)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_exit)
825
	/*
	 * Full memory barrier via a locked no-op RMW on the stack word.
	 */
	ENTRY(membar_producer)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_producer)
831
	/*
	 * Full memory barrier via a locked no-op RMW on the stack word.
	 */
	ENTRY(membar_consumer)
	lock
	xorl	$0, (%esp)		/* locked no-op RMW = full fence */
	ret
	SET_SIZE(membar_consumer)
837
838#ifdef __ELF__
839.section .note.GNU-stack,"",%progbits
840#endif
841