; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV32ZBKB
; RUN: llc -mtriple=riscv64 -mattr=+zbkb -verify-machineinstrs < %s \
; RUN:   | FileCheck %s -check-prefixes=RV64ZBKB

; TODO: These tests can be optimised
;       fold (bitreverse(srl (bitreverse c), x)) -> (shl c, x)
;       fold (bitreverse(shl (bitreverse c), x)) -> (srl c, x)

declare i8 @llvm.bitreverse.i8(i8)
declare i16 @llvm.bitreverse.i16(i16)
declare i32 @llvm.bitreverse.i32(i32)
declare i64 @llvm.bitreverse.i64(i64)

define i8 @test_bitreverse_srli_bitreverse_i8(i8 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 27
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 59
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    ret
  %1 = call i8 @llvm.bitreverse.i8(i8 %a)
  %2 = lshr i8 %1, 3
  %3 = call i8 @llvm.bitreverse.i8(i8 %2)
  ret i8 %3
}

define i16 @test_bitreverse_srli_bitreverse_i16(i16 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 23
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 55
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    ret
  %1 = call i16 @llvm.bitreverse.i16(i16 %a)
  %2 = lshr i16 %1, 7
  %3 = call i16 @llvm.bitreverse.i16(i16 %2)
  ret i16 %3
}

define i32 @test_bitreverse_srli_bitreverse_i32(i32 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 15
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 47
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    ret
  %1 = call i32 @llvm.bitreverse.i32(i32 %a)
  %2 = lshr i32 %1, 15
  %3 = call i32 @llvm.bitreverse.i32(i32 %2)
  ret i32 %3
}

define i64 @test_bitreverse_srli_bitreverse_i64(i64 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 1
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a1, a0
; RV32ZBKB-NEXT:    li a0, 0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_srli_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 33
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %1 = call i64 @llvm.bitreverse.i64(i64 %a)
  %2 = lshr i64 %1, 33
  %3 = call i64 @llvm.bitreverse.i64(i64 %2)
  ret i64 %3
}

define i8 @test_bitreverse_shli_bitreverse_i8(i8 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    slli a0, a0, 3
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 24
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i8:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    slli a0, a0, 3
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 56
; RV64ZBKB-NEXT:    ret
  %1 = call i8 @llvm.bitreverse.i8(i8 %a)
  %2 = shl i8 %1, 3
  %3 = call i8 @llvm.bitreverse.i8(i8 %2)
  ret i8 %3
}

define i16 @test_bitreverse_shli_bitreverse_i16(i16 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    slli a0, a0, 7
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    srli a0, a0, 16
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i16:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    slli a0, a0, 7
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 48
; RV64ZBKB-NEXT:    ret
  %1 = call i16 @llvm.bitreverse.i16(i16 %a)
  %2 = shl i16 %1, 7
  %3 = call i16 @llvm.bitreverse.i16(i16 %2)
  ret i16 %3
}

define i32 @test_bitreverse_shli_bitreverse_i32(i32 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    slli a0, a0, 15
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i32:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    slli a0, a0, 15
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    srli a0, a0, 32
; RV64ZBKB-NEXT:    ret
  %1 = call i32 @llvm.bitreverse.i32(i32 %a)
  %2 = shl i32 %1, 15
  %3 = call i32 @llvm.bitreverse.i32(i32 %2)
  ret i32 %3
}

define i64 @test_bitreverse_shli_bitreverse_i64(i64 %a) nounwind {
; RV32ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64:
; RV32ZBKB:       # %bb.0:
; RV32ZBKB-NEXT:    rev8 a0, a1
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    slli a0, a0, 1
; RV32ZBKB-NEXT:    rev8 a0, a0
; RV32ZBKB-NEXT:    brev8 a0, a0
; RV32ZBKB-NEXT:    li a1, 0
; RV32ZBKB-NEXT:    ret
;
; RV64ZBKB-LABEL: test_bitreverse_shli_bitreverse_i64:
; RV64ZBKB:       # %bb.0:
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    slli a0, a0, 33
; RV64ZBKB-NEXT:    rev8 a0, a0
; RV64ZBKB-NEXT:    brev8 a0, a0
; RV64ZBKB-NEXT:    ret
  %1 = call i64 @llvm.bitreverse.i64(i64 %a)
  %2 = shl i64 %1, 33
  %3 = call i64 @llvm.bitreverse.i64(i64 %2)
  ret i64 %3
}