1; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
2; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
3; RUN:   | FileCheck %s -check-prefix=RV32I
4; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
5; RUN:   | FileCheck %s -check-prefix=RV64I
6; RUN: llc -mtriple=riscv32 -mattr=+zbb -verify-machineinstrs < %s \
7; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBB
8; RUN: llc -mtriple=riscv64 -mattr=+zbb -verify-machineinstrs < %s \
9; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBB
10; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
11; RUN:   | FileCheck %s -check-prefixes=RV32ZB,RV32ZBKB
12; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
13; RUN:   | FileCheck %s -check-prefixes=RV64ZB,RV64ZBKB
; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbp -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV32ZBP
; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbp -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefix=RV64ZBP
18
19declare i16 @llvm.bswap.i16(i16)
20declare i32 @llvm.bswap.i32(i32)
21declare i64 @llvm.bswap.i64(i64)
22declare i8 @llvm.bitreverse.i8(i8)
23declare i16 @llvm.bitreverse.i16(i16)
24declare i32 @llvm.bitreverse.i32(i32)
25declare i64 @llvm.bitreverse.i64(i64)
26
; i16 bswap: base ISA lowers to a shift/or sequence; Zbb/Zbkb use rev8 plus a
; right shift to realign the halfword; Zbp selects the single rev8.h.
; CHECK lines below are autogenerated — regenerate with
; utils/update_llc_test_checks.py rather than editing by hand.
define i16 @test_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i16:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    srli a0, a0, 16
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i16:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 48
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8.h a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8.h a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  ret i16 %tmp
}
68
; i32 bswap: base ISA builds the swap from shift/and/or with LUI-materialized
; byte masks; Zbb/Zbkb use rev8 (RV64 needs an srli 32 to drop the upper
; word); RV64 Zbp selects rev8.w. Autogenerated CHECK lines — regenerate with
; utils/update_llc_test_checks.py.
define i32 @test_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    srli a2, a0, 24
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    slli a2, a0, 8
; RV32I-NEXT:    lui a3, 4080
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srliw a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srliw a2, a0, 24
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 8
; RV64I-NEXT:    lui a3, 4080
; RV64I-NEXT:    and a2, a2, a3
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i32:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a0, a0
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i32:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    srli a0, a0, 32
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8 a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8.w a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  ret i32 %tmp
}
125
; i64 bswap: on RV32 the i64 lives in the a1:a0 pair, so the halves are each
; byte-swapped and then exchanged (mv at the end); RV64 with any Zb* extension
; is a single rev8. Autogenerated CHECK lines — regenerate with
; utils/update_llc_test_checks.py.
define i64 @test_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    slli a4, a1, 8
; RV32I-NEXT:    lui a5, 4080
; RV32I-NEXT:    and a4, a4, a5
; RV32I-NEXT:    slli a1, a1, 24
; RV32I-NEXT:    or a1, a1, a4
; RV32I-NEXT:    or a2, a1, a2
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    slli a3, a0, 8
; RV32I-NEXT:    and a3, a3, a5
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a1, a0, a1
; RV32I-NEXT:    mv a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 24
; RV64I-NEXT:    lui a2, 4080
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srli a2, a0, 8
; RV64I-NEXT:    li a3, 255
; RV64I-NEXT:    slli a4, a3, 24
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    or a1, a2, a1
; RV64I-NEXT:    srli a2, a0, 40
; RV64I-NEXT:    lui a4, 16
; RV64I-NEXT:    addiw a4, a4, -256
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srli a4, a0, 56
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 24
; RV64I-NEXT:    slli a4, a3, 40
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srliw a4, a0, 24
; RV64I-NEXT:    slli a4, a4, 32
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    slli a4, a0, 40
; RV64I-NEXT:    slli a3, a3, 48
; RV64I-NEXT:    and a3, a4, a3
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    or a0, a0, a3
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZB-LABEL: test_bswap_i64:
; RV32ZB:       # %bb.0:
; RV32ZB-NEXT:    rev8 a2, a1
; RV32ZB-NEXT:    rev8 a1, a0
; RV32ZB-NEXT:    mv a0, a2
; RV32ZB-NEXT:    ret
;
; RV64ZB-LABEL: test_bswap_i64:
; RV64ZB:       # %bb.0:
; RV64ZB-NEXT:    rev8 a0, a0
; RV64ZB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev8 a2, a1
; RV32ZBP-NEXT:    rev8 a1, a0
; RV32ZBP-NEXT:    mv a0, a2
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev8 a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bswap.i64(i64 %a)
  ret i64 %tmp
}
211
; i8 bitreverse: base ISA and Zbb expand to the nibble/pair/bit swap sequence
; (masks 15/51/85); Zbkb combines rev8+brev8 then shifts the byte back down;
; Zbp selects the rev.b pseudo. Note Zbb has no bit-reverse instruction, so
; RV32ZBB/RV64ZBB match the RV32I/RV64I expansion here. Autogenerated CHECK
; lines — regenerate with utils/update_llc_test_checks.py.
define i8 @test_bitreverse_i8(i8 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i8:
; RV32I:       # %bb.0:
; RV32I-NEXT:    andi a1, a0, 15
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    srli a0, a0, 28
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 51
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    srli a0, a0, 2
; RV32I-NEXT:    andi a0, a0, 51
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    andi a1, a0, 85
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    srli a0, a0, 1
; RV32I-NEXT:    andi a0, a0, 85
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i8:
; RV64I:       # %bb.0:
; RV64I-NEXT:    andi a1, a0, 15
; RV64I-NEXT:    slli a1, a1, 4
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    srli a0, a0, 60
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 51
; RV64I-NEXT:    slli a1, a1, 2
; RV64I-NEXT:    srli a0, a0, 2
; RV64I-NEXT:    andi a0, a0, 51
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    andi a1, a0, 85
; RV64I-NEXT:    slli a1, a1, 1
; RV64I-NEXT:    srli a0, a0, 1
; RV64I-NEXT:    andi a0, a0, 85
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i8:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    andi a1, a0, 15
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    slli a0, a0, 24
; RV32ZBB-NEXT:    srli a0, a0, 28
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 51
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    srli a0, a0, 2
; RV32ZBB-NEXT:    andi a0, a0, 51
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    andi a1, a0, 85
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    srli a0, a0, 1
; RV32ZBB-NEXT:    andi a0, a0, 85
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i8:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    andi a1, a0, 15
; RV64ZBB-NEXT:    slli a1, a1, 4
; RV64ZBB-NEXT:    slli a0, a0, 56
; RV64ZBB-NEXT:    srli a0, a0, 60
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 51
; RV64ZBB-NEXT:    slli a1, a1, 2
; RV64ZBB-NEXT:    srli a0, a0, 2
; RV64ZBB-NEXT:    andi a0, a0, 51
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    andi a1, a0, 85
; RV64ZBB-NEXT:    slli a1, a1, 1
; RV64ZBB-NEXT:    srli a0, a0, 1
; RV64ZBB-NEXT:    andi a0, a0, 85
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i8:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i8:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i8 @llvm.bitreverse.i8(i8 %a)
  ret i8 %tmp
}
315
; i16 bitreverse: base ISA does bswap-then-swap-within-bytes with halfword
; masks; Zbb uses rev8 followed by the nibble/pair/bit swaps; Zbkb is
; rev8+brev8 with a realigning shift; Zbp is a single rev.h. Autogenerated
; CHECK lines — regenerate with utils/update_llc_test_checks.py.
define i16 @test_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    slli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srli a0, a0, 24
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    slli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srli a0, a0, 56
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 12
; RV32ZBB-NEXT:    lui a2, 15
; RV32ZBB-NEXT:    addi a2, a2, 240
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    srli a0, a0, 20
; RV32ZBB-NEXT:    andi a0, a0, -241
; RV32ZBB-NEXT:    or a0, a0, a1
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a1, a0, 44
; RV64ZBB-NEXT:    lui a2, 15
; RV64ZBB-NEXT:    addiw a2, a2, 240
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    srli a0, a0, 52
; RV64ZBB-NEXT:    andi a0, a0, -241
; RV64ZBB-NEXT:    or a0, a0, a1
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.h a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.h a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  ret i16 %tmp
}
453
; i32 bitreverse: base ISA performs bswap followed by nibble/pair/bit swaps
; with full 32-bit masks (0x0F0F0F0F etc. via lui+addi); Zbb starts from
; rev8; Zbkb is rev8+brev8 (RV64 adds srli 32 to realign); Zbp is a single
; rev / rev.w. Autogenerated CHECK lines — regenerate with
; utils/update_llc_test_checks.py.
define i32 @test_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    lui a2, 16
; RV32I-NEXT:    addi a2, a2, -256
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    srli a2, a0, 24
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    slli a2, a0, 8
; RV32I-NEXT:    lui a3, 4080
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a2
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srliw a1, a0, 8
; RV64I-NEXT:    lui a2, 16
; RV64I-NEXT:    addiw a2, a2, -256
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srliw a2, a0, 24
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 8
; RV64I-NEXT:    lui a3, 4080
; RV64I-NEXT:    and a2, a2, a3
; RV64I-NEXT:    slliw a0, a0, 24
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a1, a0, 36
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    srli a0, a0, 28
; RV64ZBB-NEXT:    lui a2, 986895
; RV64ZBB-NEXT:    addiw a2, a2, 240
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    sext.w a0, a0
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.w a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  ret i32 %tmp
}
609
; i64 bitreverse: RV32 processes each half of the a1:a0 pair and swaps them;
; RV64 loads the 64-bit swap masks from constant-pool entries (.LCPI6_*)
; because they don't fit an immediate sequence; Zbkb is rev8+brev8; Zbp is a
; single rev. Autogenerated CHECK lines — regenerate with
; utils/update_llc_test_checks.py.
define i64 @test_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a1, 8
; RV32I-NEXT:    lui a3, 16
; RV32I-NEXT:    addi a3, a3, -256
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    srli a4, a1, 24
; RV32I-NEXT:    or a2, a2, a4
; RV32I-NEXT:    slli a4, a1, 8
; RV32I-NEXT:    lui a5, 4080
; RV32I-NEXT:    and a4, a4, a5
; RV32I-NEXT:    slli a1, a1, 24
; RV32I-NEXT:    or a1, a1, a4
; RV32I-NEXT:    or a1, a1, a2
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    lui a4, 61681
; RV32I-NEXT:    addi a4, a4, -241
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    lui a6, 209715
; RV32I-NEXT:    addi a6, a6, 819
; RV32I-NEXT:    and a2, a2, a6
; RV32I-NEXT:    and a1, a1, a6
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    lui a7, 349525
; RV32I-NEXT:    addi a7, a7, 1365
; RV32I-NEXT:    and a2, a2, a7
; RV32I-NEXT:    and a1, a1, a7
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a2, a2, a1
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    srli a3, a0, 24
; RV32I-NEXT:    or a1, a1, a3
; RV32I-NEXT:    slli a3, a0, 8
; RV32I-NEXT:    and a3, a3, a5
; RV32I-NEXT:    slli a0, a0, 24
; RV32I-NEXT:    or a0, a0, a3
; RV32I-NEXT:    or a0, a0, a1
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    and a1, a1, a6
; RV32I-NEXT:    and a0, a0, a6
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    and a1, a1, a7
; RV32I-NEXT:    and a0, a0, a7
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a1, a1, a0
; RV32I-NEXT:    mv a0, a2
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 24
; RV64I-NEXT:    lui a2, 4080
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    srli a2, a0, 8
; RV64I-NEXT:    li a3, 255
; RV64I-NEXT:    slli a4, a3, 24
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    or a1, a2, a1
; RV64I-NEXT:    srli a2, a0, 40
; RV64I-NEXT:    lui a4, 16
; RV64I-NEXT:    addiw a4, a4, -256
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srli a4, a0, 56
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    or a1, a1, a2
; RV64I-NEXT:    slli a2, a0, 24
; RV64I-NEXT:    slli a4, a3, 40
; RV64I-NEXT:    and a2, a2, a4
; RV64I-NEXT:    srliw a4, a0, 24
; RV64I-NEXT:    slli a4, a4, 32
; RV64I-NEXT:    or a2, a2, a4
; RV64I-NEXT:    slli a4, a0, 40
; RV64I-NEXT:    slli a3, a3, 48
; RV64I-NEXT:    and a3, a4, a3
; RV64I-NEXT:    slli a0, a0, 56
; RV64I-NEXT:    or a0, a0, a3
; RV64I-NEXT:    lui a3, %hi(.LCPI6_0)
; RV64I-NEXT:    ld a3, %lo(.LCPI6_0)(a3)
; RV64I-NEXT:    or a0, a0, a2
; RV64I-NEXT:    or a0, a0, a1
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    and a1, a1, a3
; RV64I-NEXT:    and a0, a0, a3
; RV64I-NEXT:    lui a2, %hi(.LCPI6_1)
; RV64I-NEXT:    ld a2, %lo(.LCPI6_1)(a2)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    lui a2, %hi(.LCPI6_2)
; RV64I-NEXT:    ld a2, %lo(.LCPI6_2)(a2)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    rev8 a1, a1
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a2, a2, a1
; RV32ZBB-NEXT:    rev8 a0, a0
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a1, a1, a0
; RV32ZBB-NEXT:    mv a0, a2
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_0)(a1)
; RV64ZBB-NEXT:    rev8 a0, a0
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI6_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI6_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a1, a1
; RV32ZBKB-NEXT:    brev8 a2, a1
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a0
; RV32ZBKB-NEXT:    mv a0, a2
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev a2, a1
; RV32ZBP-NEXT:    rev a1, a0
; RV32ZBP-NEXT:    mv a0, a2
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  ret i64 %tmp
}
824
; bswap+bitreverse composed on i16: the byte swaps cancel, leaving a
; per-byte bit reversal. Base ISA and Zbb expand with halfword masks; Zbkb
; folds the pair into a single brev8; Zbp uses rev.b. Autogenerated CHECK
; lines — regenerate with utils/update_llc_test_checks.py.
define i16 @test_bswap_bitreverse_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bswap.i16(i16 %a)
  %tmp2 = call i16 @llvm.bitreverse.i16(i16 %tmp)
  ret i16 %tmp2
}
949
; bswap composed with bitreverse reverses only the bits inside each byte.
; Zbkb folds the pair into a single brev8 and Zbp into rev.b, while the base
; ISA and Zbb expand to three shift-and-mask swap stages (nibbles, bit-pairs,
; bits) built from the 0x0F0F0F0F, 0x33333333 and 0x55555555 constants
; (lui 61681/209715/349525 plus addi/addiw).
define i32 @test_bswap_bitreverse_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bswap.i32(i32 %a)
  %tmp2 = call i32 @llvm.bitreverse.i32(i32 %tmp)
  ret i32 %tmp2
}
1074
; i64 variant of bswap+bitreverse (per-byte bit reversal). On RV32 the value
; is split across a0/a1 and each half runs the three shift-and-mask stages,
; reusing the mask registers (a3/a4/a5) for both halves; Zbkb/Zbp likewise
; emit one brev8/rev.b per half. On RV64 the 64-bit stage masks come from
; the constant pool (.LCPI9_0..2) instead of lui/addi materialization.
define i64 @test_bswap_bitreverse_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bswap_bitreverse_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a0, 4
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 2
; RV32I-NEXT:    lui a4, 209715
; RV32I-NEXT:    addi a4, a4, 819
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 1
; RV32I-NEXT:    lui a5, 349525
; RV32I-NEXT:    addi a5, a5, 1365
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a0, a0, a5
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a1, a1, a5
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bswap_bitreverse_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, %hi(.LCPI9_0)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_0)(a1)
; RV64I-NEXT:    srli a2, a0, 4
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI9_1)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_1)(a1)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 2
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI9_2)
; RV64I-NEXT:    ld a1, %lo(.LCPI9_2)(a1)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 1
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_0)(a1)
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI9_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI9_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a1
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bswap_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bswap_bitreverse_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    rev.b a1, a1
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bswap_bitreverse_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bswap.i64(i64 %a)
  %tmp2 = call i64 @llvm.bitreverse.i64(i64 %tmp)
  ret i64 %tmp2
}
1231
; Same fold with the intrinsics in the opposite order: bitreverse then bswap
; also equals per-byte bit reversal, so Zbkb/Zbp still emit a single
; brev8/rev.b. Without those extensions the i16 expansion uses the 16-bit
; stage masks 0x0F0F/0x3333/0x5555 (lui 1/3/5 plus addi/addiw) and needs no
; final shift because the value stays within the low 16 bits.
define i16 @test_bitreverse_bswap_i16(i16 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i16:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 1
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 3
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 5
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i16:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 1
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 3
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 5
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 1
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 3
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 5
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 1
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 3
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 5
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i16:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i16:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i16 @llvm.bitreverse.i16(i16 %a)
  %tmp2 = call i16 @llvm.bswap.i16(i16 %tmp)
  ret i16 %tmp2
}
1356
; i32 variant with bitreverse before bswap; expected lowering is identical
; to the bswap-then-bitreverse order (per-byte bit reversal): brev8 with
; Zbkb, rev.b with Zbp, and the three-stage shift-and-mask expansion
; (0x0F0F0F0F / 0x33333333 / 0x55555555) otherwise.
define i32 @test_bitreverse_bswap_i32(i32 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i32:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 4
; RV32I-NEXT:    lui a2, 61681
; RV32I-NEXT:    addi a2, a2, -241
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 2
; RV32I-NEXT:    lui a2, 209715
; RV32I-NEXT:    addi a2, a2, 819
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    srli a1, a0, 1
; RV32I-NEXT:    lui a2, 349525
; RV32I-NEXT:    addi a2, a2, 1365
; RV32I-NEXT:    and a1, a1, a2
; RV32I-NEXT:    and a0, a0, a2
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i32:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 4
; RV64I-NEXT:    lui a2, 61681
; RV64I-NEXT:    addiw a2, a2, -241
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 4
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 2
; RV64I-NEXT:    lui a2, 209715
; RV64I-NEXT:    addiw a2, a2, 819
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 2
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    srli a1, a0, 1
; RV64I-NEXT:    lui a2, 349525
; RV64I-NEXT:    addiw a2, a2, 1365
; RV64I-NEXT:    and a1, a1, a2
; RV64I-NEXT:    and a0, a0, a2
; RV64I-NEXT:    slliw a0, a0, 1
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 4
; RV32ZBB-NEXT:    lui a2, 61681
; RV32ZBB-NEXT:    addi a2, a2, -241
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 2
; RV32ZBB-NEXT:    lui a2, 209715
; RV32ZBB-NEXT:    addi a2, a2, 819
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    srli a1, a0, 1
; RV32ZBB-NEXT:    lui a2, 349525
; RV32ZBB-NEXT:    addi a2, a2, 1365
; RV32ZBB-NEXT:    and a1, a1, a2
; RV32ZBB-NEXT:    and a0, a0, a2
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 4
; RV64ZBB-NEXT:    lui a2, 61681
; RV64ZBB-NEXT:    addiw a2, a2, -241
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 4
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 2
; RV64ZBB-NEXT:    lui a2, 209715
; RV64ZBB-NEXT:    addiw a2, a2, 819
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 2
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    srli a1, a0, 1
; RV64ZBB-NEXT:    lui a2, 349525
; RV64ZBB-NEXT:    addiw a2, a2, 1365
; RV64ZBB-NEXT:    and a1, a1, a2
; RV64ZBB-NEXT:    and a0, a0, a2
; RV64ZBB-NEXT:    slliw a0, a0, 1
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i32:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i32:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i32 @llvm.bitreverse.i32(i32 %a)
  %tmp2 = call i32 @llvm.bswap.i32(i32 %tmp)
  ret i32 %tmp2
}
1481
; i64 variant with bitreverse before bswap; mirrors test_bswap_bitreverse_i64:
; RV32 runs the three shift-and-mask stages on each half of the a0/a1 pair
; (mask registers a3/a4/a5 reused) and Zbkb/Zbp need one brev8/rev.b per
; half, while RV64 loads the 64-bit stage masks from the constant pool
; (.LCPI12_0..2) and Zbkb/Zbp fold to a single instruction.
define i64 @test_bitreverse_bswap_i64(i64 %a) nounwind {
; RV32I-LABEL: test_bitreverse_bswap_i64:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a2, a0, 4
; RV32I-NEXT:    lui a3, 61681
; RV32I-NEXT:    addi a3, a3, -241
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a0, a0, a3
; RV32I-NEXT:    slli a0, a0, 4
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 2
; RV32I-NEXT:    lui a4, 209715
; RV32I-NEXT:    addi a4, a4, 819
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a0, a0, a4
; RV32I-NEXT:    slli a0, a0, 2
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a0, 1
; RV32I-NEXT:    lui a5, 349525
; RV32I-NEXT:    addi a5, a5, 1365
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a0, a0, a5
; RV32I-NEXT:    slli a0, a0, 1
; RV32I-NEXT:    or a0, a2, a0
; RV32I-NEXT:    srli a2, a1, 4
; RV32I-NEXT:    and a2, a2, a3
; RV32I-NEXT:    and a1, a1, a3
; RV32I-NEXT:    slli a1, a1, 4
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 2
; RV32I-NEXT:    and a2, a2, a4
; RV32I-NEXT:    and a1, a1, a4
; RV32I-NEXT:    slli a1, a1, 2
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    srli a2, a1, 1
; RV32I-NEXT:    and a2, a2, a5
; RV32I-NEXT:    and a1, a1, a5
; RV32I-NEXT:    slli a1, a1, 1
; RV32I-NEXT:    or a1, a2, a1
; RV32I-NEXT:    ret
;
; RV64I-LABEL: test_bitreverse_bswap_i64:
; RV64I:       # %bb.0:
; RV64I-NEXT:    lui a1, %hi(.LCPI12_0)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_0)(a1)
; RV64I-NEXT:    srli a2, a0, 4
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI12_1)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_1)(a1)
; RV64I-NEXT:    slli a0, a0, 4
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 2
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    lui a1, %hi(.LCPI12_2)
; RV64I-NEXT:    ld a1, %lo(.LCPI12_2)(a1)
; RV64I-NEXT:    slli a0, a0, 2
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    srli a2, a0, 1
; RV64I-NEXT:    and a2, a2, a1
; RV64I-NEXT:    and a0, a0, a1
; RV64I-NEXT:    slli a0, a0, 1
; RV64I-NEXT:    or a0, a2, a0
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a2, a0, 4
; RV32ZBB-NEXT:    lui a3, 61681
; RV32ZBB-NEXT:    addi a3, a3, -241
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a0, a0, a3
; RV32ZBB-NEXT:    slli a0, a0, 4
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 2
; RV32ZBB-NEXT:    lui a4, 209715
; RV32ZBB-NEXT:    addi a4, a4, 819
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a0, a0, a4
; RV32ZBB-NEXT:    slli a0, a0, 2
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a0, 1
; RV32ZBB-NEXT:    lui a5, 349525
; RV32ZBB-NEXT:    addi a5, a5, 1365
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a0, a0, a5
; RV32ZBB-NEXT:    slli a0, a0, 1
; RV32ZBB-NEXT:    or a0, a2, a0
; RV32ZBB-NEXT:    srli a2, a1, 4
; RV32ZBB-NEXT:    and a2, a2, a3
; RV32ZBB-NEXT:    and a1, a1, a3
; RV32ZBB-NEXT:    slli a1, a1, 4
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 2
; RV32ZBB-NEXT:    and a2, a2, a4
; RV32ZBB-NEXT:    and a1, a1, a4
; RV32ZBB-NEXT:    slli a1, a1, 2
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    srli a2, a1, 1
; RV32ZBB-NEXT:    and a2, a2, a5
; RV32ZBB-NEXT:    and a1, a1, a5
; RV32ZBB-NEXT:    slli a1, a1, 1
; RV32ZBB-NEXT:    or a1, a2, a1
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_0)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_0)(a1)
; RV64ZBB-NEXT:    srli a2, a0, 4
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_1)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_1)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 4
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 2
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    lui a1, %hi(.LCPI12_2)
; RV64ZBB-NEXT:    ld a1, %lo(.LCPI12_2)(a1)
; RV64ZBB-NEXT:    slli a0, a0, 2
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    srli a2, a0, 1
; RV64ZBB-NEXT:    and a2, a2, a1
; RV64ZBB-NEXT:    and a0, a0, a1
; RV64ZBB-NEXT:    slli a0, a0, 1
; RV64ZBB-NEXT:    or a0, a2, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a1
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_bswap_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: test_bitreverse_bswap_i64:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    rev.b a0, a0
; RV32ZBP-NEXT:    rev.b a1, a1
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: test_bitreverse_bswap_i64:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    rev.b a0, a0
; RV64ZBP-NEXT:    ret
  %tmp = call i64 @llvm.bitreverse.i64(i64 %a)
  %tmp2 = call i64 @llvm.bswap.i64(i64 %tmp)
  ret i64 %tmp2
}
1638
; Regression test — presumably for llvm.org PR55484 (name suggests it; TODO
; confirm against the bug report). The IR is (x >> 8 | x << 8) truncated to
; i16 then sign-extended: all configurations must lower it as the two shifts
; plus an or followed by a 16-bit sign extension (slli/srai pair, or sext.h
; with Zbb), i.e. the or of misaligned shifts is not folded into a
; bswap/rotate of the truncated value.
define i32 @pr55484(i32 %0) {
; RV32I-LABEL: pr55484:
; RV32I:       # %bb.0:
; RV32I-NEXT:    srli a1, a0, 8
; RV32I-NEXT:    slli a0, a0, 8
; RV32I-NEXT:    or a0, a1, a0
; RV32I-NEXT:    slli a0, a0, 16
; RV32I-NEXT:    srai a0, a0, 16
; RV32I-NEXT:    ret
;
; RV64I-LABEL: pr55484:
; RV64I:       # %bb.0:
; RV64I-NEXT:    srli a1, a0, 8
; RV64I-NEXT:    slli a0, a0, 8
; RV64I-NEXT:    or a0, a1, a0
; RV64I-NEXT:    slli a0, a0, 48
; RV64I-NEXT:    srai a0, a0, 48
; RV64I-NEXT:    ret
;
; RV32ZBB-LABEL: pr55484:
; RV32ZBB:       # %bb.0:
; RV32ZBB-NEXT:    srli a1, a0, 8
; RV32ZBB-NEXT:    slli a0, a0, 8
; RV32ZBB-NEXT:    or a0, a1, a0
; RV32ZBB-NEXT:    sext.h a0, a0
; RV32ZBB-NEXT:    ret
;
; RV64ZBB-LABEL: pr55484:
; RV64ZBB:       # %bb.0:
; RV64ZBB-NEXT:    srli a1, a0, 8
; RV64ZBB-NEXT:    slli a0, a0, 8
; RV64ZBB-NEXT:    or a0, a1, a0
; RV64ZBB-NEXT:    sext.h a0, a0
; RV64ZBB-NEXT:    ret
;
; RV32ZBKB-LABEL: pr55484:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    srli a1, a0, 8
; RV32ZBKB-NEXT:    slli a0, a0, 8
; RV32ZBKB-NEXT:    or a0, a1, a0
; RV32ZBKB-NEXT:    slli a0, a0, 16
; RV32ZBKB-NEXT:    srai a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: pr55484:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    srli a1, a0, 8
; RV64ZBKB-NEXT:    slli a0, a0, 8
; RV64ZBKB-NEXT:    or a0, a1, a0
; RV64ZBKB-NEXT:    slli a0, a0, 48
; RV64ZBKB-NEXT:    srai a0, a0, 48
; RV64ZBKB-NEXT:    ret
;
; RV32ZBP-LABEL: pr55484:
; RV32ZBP:       # %bb.0:
; RV32ZBP-NEXT:    srli a1, a0, 8
; RV32ZBP-NEXT:    slli a0, a0, 8
; RV32ZBP-NEXT:    or a0, a1, a0
; RV32ZBP-NEXT:    slli a0, a0, 16
; RV32ZBP-NEXT:    srai a0, a0, 16
; RV32ZBP-NEXT:    ret
;
; RV64ZBP-LABEL: pr55484:
; RV64ZBP:       # %bb.0:
; RV64ZBP-NEXT:    srli a1, a0, 8
; RV64ZBP-NEXT:    slli a0, a0, 8
; RV64ZBP-NEXT:    or a0, a1, a0
; RV64ZBP-NEXT:    slli a0, a0, 48
; RV64ZBP-NEXT:    srai a0, a0, 48
; RV64ZBP-NEXT:    ret
  %2 = lshr i32 %0, 8
  %3 = shl i32 %0, 8
  %4 = or i32 %2, %3
  %5 = trunc i32 %4 to i16
  %6 = sext i16 %5 to i32
  ret i32 %6
}
1716