1; Test the instruction sequences produced by atomicrmw instructions. In
2; particular, ensure there are no stores/spills inserted between the exclusive
3; load and stores, which would invalidate the exclusive monitor.
4
5; RUN: llc -mtriple=armv8-unknown-none-eabi -O0 -o - %s | FileCheck %s --check-prefix=COMMON --check-prefix=EXPAND32 --check-prefix=EXPAND64
6; RUN: llc -mtriple=armv6-unknown-none-eabi -O0 -o - %s | FileCheck %s --check-prefix=COMMON --check-prefix=EXPAND32 --check-prefix=EXPAND64
7; RUN: llc -mtriple=thumbv7-unknown-none-eabi -O0 -o - %s | FileCheck %s --check-prefix=COMMON --check-prefix=EXPAND32 --check-prefix=EXPAND64
8; RUN: llc -mtriple=thumbv6-unknown-none-eabi -O0 -o - %s | FileCheck %s --check-prefix=COMMON --check-prefix=THUMB1
9; RUN: llc -mtriple=thumbv8m.base-unknown-none-eabi -O0 -o - %s | FileCheck %s --check-prefix=COMMON --check-prefix=EXPAND32 --check-prefix=BASELINE64
10
11@atomic_i8 = external global i8
12@atomic_i16 = external global i16
13@atomic_i32 = external global i32
14@atomic_i64 = external global i64
15
; --- 8-bit atomicrmw operations ---
; On targets with byte-sized exclusives (EXPAND32 prefixes) each op must
; expand to a ldrexb/strexb loop with no store in between, since an
; intervening store would clear the exclusive monitor. Thumb1 targets
; instead lower to the matching __sync_* libcall.
define i8 @test_xchg_i8() {
; COMMON-LABEL: test_xchg_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_lock_test_and_set_1
entry:
  %0 = atomicrmw xchg i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_add_i8() {
; COMMON-LABEL: test_add_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_add_1
entry:
  %0 = atomicrmw add i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_sub_i8() {
; COMMON-LABEL: test_sub_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_sub_1
entry:
  %0 = atomicrmw sub i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_and_i8() {
; COMMON-LABEL: test_and_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_and_1
entry:
  %0 = atomicrmw and i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_nand_i8() {
; COMMON-LABEL: test_nand_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_nand_1
entry:
  %0 = atomicrmw nand i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_or_i8() {
; COMMON-LABEL: test_or_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_or_1
entry:
  %0 = atomicrmw or i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_xor_i8() {
; COMMON-LABEL: test_xor_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_xor_1
entry:
  %0 = atomicrmw xor i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_max_i8() {
; COMMON-LABEL: test_max_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_max_1
entry:
  %0 = atomicrmw max i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_min_i8() {
; COMMON-LABEL: test_min_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_min_1
entry:
  %0 = atomicrmw min i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_umax_i8() {
; COMMON-LABEL: test_umax_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_umax_1
entry:
  %0 = atomicrmw umax i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
define i8 @test_umin_i8() {
; COMMON-LABEL: test_umin_i8:
; EXPAND32: ldrexb
; EXPAND32-NOT: str
; EXPAND32: strexb
; THUMB1: bl __sync_fetch_and_umin_1
entry:
  %0 = atomicrmw umin i8* @atomic_i8, i8 1 monotonic
  ret i8 %0
}
126
127
; --- 16-bit atomicrmw operations ---
; Same pattern as the i8 cases: halfword exclusives (ldrexh/strexh) with
; no intervening store on EXPAND32 targets; __sync_*_2 libcalls on Thumb1.
define i16 @test_xchg_i16() {
; COMMON-LABEL: test_xchg_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_lock_test_and_set_2
entry:
  %0 = atomicrmw xchg i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_add_i16() {
; COMMON-LABEL: test_add_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_add_2
entry:
  %0 = atomicrmw add i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_sub_i16() {
; COMMON-LABEL: test_sub_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_sub_2
entry:
  %0 = atomicrmw sub i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_and_i16() {
; COMMON-LABEL: test_and_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_and_2
entry:
  %0 = atomicrmw and i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_nand_i16() {
; COMMON-LABEL: test_nand_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_nand_2
entry:
  %0 = atomicrmw nand i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_or_i16() {
; COMMON-LABEL: test_or_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_or_2
entry:
  %0 = atomicrmw or i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_xor_i16() {
; COMMON-LABEL: test_xor_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_xor_2
entry:
  %0 = atomicrmw xor i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_max_i16() {
; COMMON-LABEL: test_max_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_max_2
entry:
  %0 = atomicrmw max i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_min_i16() {
; COMMON-LABEL: test_min_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_min_2
entry:
  %0 = atomicrmw min i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_umax_i16() {
; COMMON-LABEL: test_umax_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_umax_2
entry:
  %0 = atomicrmw umax i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
define i16 @test_umin_i16() {
; COMMON-LABEL: test_umin_i16:
; EXPAND32: ldrexh
; EXPAND32-NOT: str
; EXPAND32: strexh
; THUMB1: bl __sync_fetch_and_umin_2
entry:
  %0 = atomicrmw umin i16* @atomic_i16, i16 1 monotonic
  ret i16 %0
}
238
239
; --- 32-bit atomicrmw operations ---
; Word exclusives (ldrex/strex) with no intervening store on EXPAND32
; targets; __sync_*_4 libcalls on Thumb1.
define i32 @test_xchg_i32() {
; COMMON-LABEL: test_xchg_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_lock_test_and_set_4
entry:
  %0 = atomicrmw xchg i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_add_i32() {
; COMMON-LABEL: test_add_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_add_4
entry:
  %0 = atomicrmw add i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_sub_i32() {
; COMMON-LABEL: test_sub_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_sub_4
entry:
  %0 = atomicrmw sub i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_and_i32() {
; COMMON-LABEL: test_and_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_and_4
entry:
  %0 = atomicrmw and i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_nand_i32() {
; COMMON-LABEL: test_nand_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_nand_4
entry:
  %0 = atomicrmw nand i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_or_i32() {
; COMMON-LABEL: test_or_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_or_4
entry:
  %0 = atomicrmw or i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_xor_i32() {
; COMMON-LABEL: test_xor_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_xor_4
entry:
  %0 = atomicrmw xor i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_max_i32() {
; COMMON-LABEL: test_max_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_max_4
entry:
  %0 = atomicrmw max i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
; 32-bit atomic signed-min: ldrex/strex loop with no intervening store on
; EXPAND32 targets; __sync_fetch_and_min_4 libcall on Thumb1. (Stray blank
; line between the CHECK directives removed for consistency with the other
; functions in this file.)
define i32 @test_min_i32() {
; COMMON-LABEL: test_min_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_min_4
entry:
  %0 = atomicrmw min i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
; 32-bit unsigned max/min follow the same pattern as the signed variants
; above: exclusive load/store loop on EXPAND32, libcall on Thumb1.
define i32 @test_umax_i32() {
; COMMON-LABEL: test_umax_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_umax_4
entry:
  %0 = atomicrmw umax i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
define i32 @test_umin_i32() {
; COMMON-LABEL: test_umin_i32:
; EXPAND32: ldrex
; EXPAND32-NOT: str
; EXPAND32: strex
; THUMB1: bl __sync_fetch_and_umin_4
entry:
  %0 = atomicrmw umin i32* @atomic_i32, i32 1 monotonic
  ret i32 %0
}
351
; --- 64-bit atomicrmw operations ---
; EXPAND64 targets must use a ldrexd/strexd loop with no store in between.
; Thumb1 lowers to the __sync_*_8 libcalls. The thumbv8m.base run line
; (BASELINE64) instead checks a __sync_val_compare_and_swap_8 call,
; i.e. a CAS-based expansion rather than 64-bit exclusives.
define i64 @test_xchg_i64() {
; COMMON-LABEL: test_xchg_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_lock_test_and_set_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw xchg i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_add_i64() {
; COMMON-LABEL: test_add_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_add_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw add i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_sub_i64() {
; COMMON-LABEL: test_sub_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_sub_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw sub i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_and_i64() {
; COMMON-LABEL: test_and_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_and_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw and i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_nand_i64() {
; COMMON-LABEL: test_nand_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_nand_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw nand i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_or_i64() {
; COMMON-LABEL: test_or_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_or_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw or i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_xor_i64() {
; COMMON-LABEL: test_xor_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_xor_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw xor i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}

define i64 @test_max_i64() {
; COMMON-LABEL: test_max_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_max_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw max i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_min_i64() {
; COMMON-LABEL: test_min_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_min_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw min i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_umax_i64() {
; COMMON-LABEL: test_umax_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_umax_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw umax i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
define i64 @test_umin_i64() {
; COMMON-LABEL: test_umin_i64:
; EXPAND64: ldrexd
; EXPAND64-NOT: str
; EXPAND64: strexd
; THUMB1: bl __sync_fetch_and_umin_8
; BASELINE64: bl __sync_val_compare_and_swap_8
entry:
  %0 = atomicrmw umin i64* @atomic_i64, i64 1 monotonic
  ret i64 %0
}
474