; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc -mtriple=riscv32 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV32
; RUN: llc -mtriple=riscv64 -mattr=+v -verify-machineinstrs < %s | FileCheck %s --check-prefixes=CHECK,RV64

; i8 bitreverse: nibble swap, then 2-bit (mask 0x33) and 1-bit (mask 0x55) swaps.
define <vscale x 1 x i8> @bitreverse_nxv1i8(<vscale x 1 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv1i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, mf8, ta, mu
; CHECK-NEXT:    vand.vi v9, v8, 15
; CHECK-NEXT:    vsll.vi v9, v9, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v9
; CHECK-NEXT:    vsrl.vi v9, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    vsrl.vi v9, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 1 x i8> @llvm.bitreverse.nxv1i8(<vscale x 1 x i8> %va)
  ret <vscale x 1 x i8> %a
}
declare <vscale x 1 x i8> @llvm.bitreverse.nxv1i8(<vscale x 1 x i8>)

; Same i8 lowering at LMUL=1/4.
define <vscale x 2 x i8> @bitreverse_nxv2i8(<vscale x 2 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv2i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, mf4, ta, mu
; CHECK-NEXT:    vand.vi v9, v8, 15
; CHECK-NEXT:    vsll.vi v9, v9, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v9
; CHECK-NEXT:    vsrl.vi v9, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    vsrl.vi v9, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 2 x i8> @llvm.bitreverse.nxv2i8(<vscale x 2 x i8> %va)
  ret <vscale x 2 x i8> %a
}
declare <vscale x 2 x i8> @llvm.bitreverse.nxv2i8(<vscale x 2 x i8>)

; Same i8 lowering at LMUL=1/2.
define <vscale x 4 x i8> @bitreverse_nxv4i8(<vscale x 4 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv4i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, mf2, ta, mu
; CHECK-NEXT:    vand.vi v9, v8, 15
; CHECK-NEXT:    vsll.vi v9, v9, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v9
; CHECK-NEXT:    vsrl.vi v9, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    vsrl.vi v9, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 4 x i8> @llvm.bitreverse.nxv4i8(<vscale x 4 x i8> %va)
  ret <vscale x 4 x i8> %a
}
declare <vscale x 4 x i8> @llvm.bitreverse.nxv4i8(<vscale x 4 x i8>)

; Same i8 lowering at LMUL=1.
define <vscale x 8 x i8> @bitreverse_nxv8i8(<vscale x 8 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv8i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, m1, ta, mu
; CHECK-NEXT:    vand.vi v9, v8, 15
; CHECK-NEXT:    vsll.vi v9, v9, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v9
; CHECK-NEXT:    vsrl.vi v9, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    vsrl.vi v9, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v9, v9, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v9, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 8 x i8> @llvm.bitreverse.nxv8i8(<vscale x 8 x i8> %va)
  ret <vscale x 8 x i8> %a
}
declare <vscale x 8 x i8> @llvm.bitreverse.nxv8i8(<vscale x 8 x i8>)

; Same i8 lowering at LMUL=2 (scratch register becomes v10).
define <vscale x 16 x i8> @bitreverse_nxv16i8(<vscale x 16 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv16i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, m2, ta, mu
; CHECK-NEXT:    vand.vi v10, v8, 15
; CHECK-NEXT:    vsll.vi v10, v10, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v10
; CHECK-NEXT:    vsrl.vi v10, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v10, v10, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v10, v8
; CHECK-NEXT:    vsrl.vi v10, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v10, v10, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v10, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 16 x i8> @llvm.bitreverse.nxv16i8(<vscale x 16 x i8> %va)
  ret <vscale x 16 x i8> %a
}
declare <vscale x 16 x i8> @llvm.bitreverse.nxv16i8(<vscale x 16 x i8>)

; Same i8 lowering at LMUL=4 (scratch register becomes v12).
define <vscale x 32 x i8> @bitreverse_nxv32i8(<vscale x 32 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv32i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, m4, ta, mu
; CHECK-NEXT:    vand.vi v12, v8, 15
; CHECK-NEXT:    vsll.vi v12, v12, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v12
; CHECK-NEXT:    vsrl.vi v12, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v12, v12, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v12, v8
; CHECK-NEXT:    vsrl.vi v12, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v12, v12, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v12, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 32 x i8> @llvm.bitreverse.nxv32i8(<vscale x 32 x i8> %va)
  ret <vscale x 32 x i8> %a
}
declare <vscale x 32 x i8> @llvm.bitreverse.nxv32i8(<vscale x 32 x i8>)

; Same i8 lowering at LMUL=8 (scratch register becomes v16).
define <vscale x 64 x i8> @bitreverse_nxv64i8(<vscale x 64 x i8> %va) {
; CHECK-LABEL: bitreverse_nxv64i8:
; CHECK:       # %bb.0:
; CHECK-NEXT:    vsetvli a0, zero, e8, m8, ta, mu
; CHECK-NEXT:    vand.vi v16, v8, 15
; CHECK-NEXT:    vsll.vi v16, v16, 4
; CHECK-NEXT:    vsrl.vi v8, v8, 4
; CHECK-NEXT:    vand.vi v8, v8, 15
; CHECK-NEXT:    vor.vv v8, v8, v16
; CHECK-NEXT:    vsrl.vi v16, v8, 2
; CHECK-NEXT:    li a0, 51
; CHECK-NEXT:    vand.vx v16, v16, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vsll.vi v8, v8, 2
; CHECK-NEXT:    vor.vv v8, v16, v8
; CHECK-NEXT:    vsrl.vi v16, v8, 1
; CHECK-NEXT:    li a0, 85
; CHECK-NEXT:    vand.vx v16, v16, a0
; CHECK-NEXT:    vand.vx v8, v8, a0
; CHECK-NEXT:    vadd.vv v8, v8, v8
; CHECK-NEXT:    vor.vv v8, v16, v8
; CHECK-NEXT:    ret
  %a = call <vscale x 64 x i8> @llvm.bitreverse.nxv64i8(<vscale x 64 x i8> %va)
  ret <vscale x 64 x i8> %a
}
declare <vscale x 64 x i8> @llvm.bitreverse.nxv64i8(<vscale x 64 x i8>)

; i16 bitreverse: byte swap, then 4/2/1-bit swaps with 16-bit masks built via
; lui+addi (RV32) / lui+addiw (RV64), hence the split check prefixes.
define <vscale x 1 x i16> @bitreverse_nxv1i16(<vscale x 1 x i16> %va) {
; RV32-LABEL: bitreverse_nxv1i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, mf4, ta, mu
; RV32-NEXT:    vsrl.vi v9, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v9
; RV32-NEXT:    vsrl.vi v9, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv1i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, mf4, ta, mu
; RV64-NEXT:    vsrl.vi v9, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v9
; RV64-NEXT:    vsrl.vi v9, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    ret
  %a = call <vscale x 1 x i16> @llvm.bitreverse.nxv1i16(<vscale x 1 x i16> %va)
  ret <vscale x 1 x i16> %a
}
declare <vscale x 1 x i16> @llvm.bitreverse.nxv1i16(<vscale x 1 x i16>)

; Same i16 lowering at LMUL=1/2.
define <vscale x 2 x i16> @bitreverse_nxv2i16(<vscale x 2 x i16> %va) {
; RV32-LABEL: bitreverse_nxv2i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, mf2, ta, mu
; RV32-NEXT:    vsrl.vi v9, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v9
; RV32-NEXT:    vsrl.vi v9, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv2i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, mf2, ta, mu
; RV64-NEXT:    vsrl.vi v9, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v9
; RV64-NEXT:    vsrl.vi v9, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    ret
  %a = call <vscale x 2 x i16> @llvm.bitreverse.nxv2i16(<vscale x 2 x i16> %va)
  ret <vscale x 2 x i16> %a
}
declare <vscale x 2 x i16> @llvm.bitreverse.nxv2i16(<vscale x 2 x i16>)

; Same i16 lowering at LMUL=1.
define <vscale x 4 x i16> @bitreverse_nxv4i16(<vscale x 4 x i16> %va) {
; RV32-LABEL: bitreverse_nxv4i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, m1, ta, mu
; RV32-NEXT:    vsrl.vi v9, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v9
; RV32-NEXT:    vsrl.vi v9, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    vsrl.vi v9, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v9, v9, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v9, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv4i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, m1, ta, mu
; RV64-NEXT:    vsrl.vi v9, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v9
; RV64-NEXT:    vsrl.vi v9, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    vsrl.vi v9, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v9, v9, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v9, v8
; RV64-NEXT:    ret
  %a = call <vscale x 4 x i16> @llvm.bitreverse.nxv4i16(<vscale x 4 x i16> %va)
  ret <vscale x 4 x i16> %a
}
declare <vscale x 4 x i16> @llvm.bitreverse.nxv4i16(<vscale x 4 x i16>)

; Same i16 lowering at LMUL=2 (scratch register becomes v10).
define <vscale x 8 x i16> @bitreverse_nxv8i16(<vscale x 8 x i16> %va) {
; RV32-LABEL: bitreverse_nxv8i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, m2, ta, mu
; RV32-NEXT:    vsrl.vi v10, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v10
; RV32-NEXT:    vsrl.vi v10, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v10, v10, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v10, v8
; RV32-NEXT:    vsrl.vi v10, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v10, v10, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v10, v8
; RV32-NEXT:    vsrl.vi v10, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v10, v10, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v10, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv8i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, m2, ta, mu
; RV64-NEXT:    vsrl.vi v10, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v10
; RV64-NEXT:    vsrl.vi v10, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v10, v10, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v10, v8
; RV64-NEXT:    vsrl.vi v10, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v10, v10, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v10, v8
; RV64-NEXT:    vsrl.vi v10, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v10, v10, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v10, v8
; RV64-NEXT:    ret
  %a = call <vscale x 8 x i16> @llvm.bitreverse.nxv8i16(<vscale x 8 x i16> %va)
  ret <vscale x 8 x i16> %a
}
declare <vscale x 8 x i16> @llvm.bitreverse.nxv8i16(<vscale x 8 x i16>)

; Same i16 lowering at LMUL=4 (scratch register becomes v12).
define <vscale x 16 x i16> @bitreverse_nxv16i16(<vscale x 16 x i16> %va) {
; RV32-LABEL: bitreverse_nxv16i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, m4, ta, mu
; RV32-NEXT:    vsrl.vi v12, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v12
; RV32-NEXT:    vsrl.vi v12, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v12, v12, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v12, v8
; RV32-NEXT:    vsrl.vi v12, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v12, v12, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v12, v8
; RV32-NEXT:    vsrl.vi v12, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v12, v12, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v12, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv16i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, m4, ta, mu
; RV64-NEXT:    vsrl.vi v12, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v12
; RV64-NEXT:    vsrl.vi v12, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v12, v12, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v12, v8
; RV64-NEXT:    vsrl.vi v12, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v12, v12, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v12, v8
; RV64-NEXT:    vsrl.vi v12, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v12, v12, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v12, v8
; RV64-NEXT:    ret
  %a = call <vscale x 16 x i16> @llvm.bitreverse.nxv16i16(<vscale x 16 x i16> %va)
  ret <vscale x 16 x i16> %a
}
declare <vscale x 16 x i16> @llvm.bitreverse.nxv16i16(<vscale x 16 x i16>)

; Same i16 lowering at LMUL=8 (scratch register becomes v16).
define <vscale x 32 x i16> @bitreverse_nxv32i16(<vscale x 32 x i16> %va) {
; RV32-LABEL: bitreverse_nxv32i16:
; RV32:       # %bb.0:
; RV32-NEXT:    vsetvli a0, zero, e16, m8, ta, mu
; RV32-NEXT:    vsrl.vi v16, v8, 8
; RV32-NEXT:    vsll.vi v8, v8, 8
; RV32-NEXT:    vor.vv v8, v8, v16
; RV32-NEXT:    vsrl.vi v16, v8, 4
; RV32-NEXT:    lui a0, 1
; RV32-NEXT:    addi a0, a0, -241
; RV32-NEXT:    vand.vx v16, v16, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 4
; RV32-NEXT:    vor.vv v8, v16, v8
; RV32-NEXT:    vsrl.vi v16, v8, 2
; RV32-NEXT:    lui a0, 3
; RV32-NEXT:    addi a0, a0, 819
; RV32-NEXT:    vand.vx v16, v16, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vsll.vi v8, v8, 2
; RV32-NEXT:    vor.vv v8, v16, v8
; RV32-NEXT:    vsrl.vi v16, v8, 1
; RV32-NEXT:    lui a0, 5
; RV32-NEXT:    addi a0, a0, 1365
; RV32-NEXT:    vand.vx v16, v16, a0
; RV32-NEXT:    vand.vx v8, v8, a0
; RV32-NEXT:    vadd.vv v8, v8, v8
; RV32-NEXT:    vor.vv v8, v16, v8
; RV32-NEXT:    ret
;
; RV64-LABEL: bitreverse_nxv32i16:
; RV64:       # %bb.0:
; RV64-NEXT:    vsetvli a0, zero, e16, m8, ta, mu
; RV64-NEXT:    vsrl.vi v16, v8, 8
; RV64-NEXT:    vsll.vi v8, v8, 8
; RV64-NEXT:    vor.vv v8, v8, v16
; RV64-NEXT:    vsrl.vi v16, v8, 4
; RV64-NEXT:    lui a0, 1
; RV64-NEXT:    addiw a0, a0, -241
; RV64-NEXT:    vand.vx v16, v16, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 4
; RV64-NEXT:    vor.vv v8, v16, v8
; RV64-NEXT:    vsrl.vi v16, v8, 2
; RV64-NEXT:    lui a0, 3
; RV64-NEXT:    addiw a0, a0, 819
; RV64-NEXT:    vand.vx v16, v16, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vsll.vi v8, v8, 2
; RV64-NEXT:    vor.vv v8, v16, v8
; RV64-NEXT:    vsrl.vi v16, v8, 1
; RV64-NEXT:    lui a0, 5
; RV64-NEXT:    addiw a0, a0, 1365
; RV64-NEXT:    vand.vx v16, v16, a0
; RV64-NEXT:    vand.vx v8, v8, a0
; RV64-NEXT:    vadd.vv v8, v8, v8
; RV64-NEXT:    vor.vv v8, v16, v8
; RV64-NEXT:    ret
  %a = call <vscale x 32 x i16> @llvm.bitreverse.nxv32i16(<vscale x 32 x i16> %va)
  ret <vscale x 32 x i16> %a
}
declare <vscale x 32 x i16> @llvm.bitreverse.nxv32i16(<vscale x 32 x i16>)

572ada54585SCraig Topperdefine <vscale x 1 x i32> @bitreverse_nxv1i32(<vscale x 1 x i32> %va) {
573ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv1i32:
574ada54585SCraig Topper; RV32:       # %bb.0:
575ada54585SCraig Topper; RV32-NEXT:    vsetvli a0, zero, e32, mf2, ta, mu
576ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 8
577ada54585SCraig Topper; RV32-NEXT:    lui a0, 16
578ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -256
579ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
580ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 24
581ada54585SCraig Topper; RV32-NEXT:    vor.vv v9, v9, v10
582ada54585SCraig Topper; RV32-NEXT:    vsll.vi v10, v8, 8
583ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
584ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
585ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
586ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v10
587ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v9
588ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 4
589ada54585SCraig Topper; RV32-NEXT:    lui a0, 61681
590ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -241
591ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
592ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
593ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
594ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
595ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 2
596ada54585SCraig Topper; RV32-NEXT:    lui a0, 209715
597ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 819
598ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
599ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
600ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
601ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
602ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 1
603ada54585SCraig Topper; RV32-NEXT:    lui a0, 349525
604ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 1365
605ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
606ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
607ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
608ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
609ada54585SCraig Topper; RV32-NEXT:    ret
610ada54585SCraig Topper;
611ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv1i32:
612ada54585SCraig Topper; RV64:       # %bb.0:
613ada54585SCraig Topper; RV64-NEXT:    vsetvli a0, zero, e32, mf2, ta, mu
614ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 8
615ada54585SCraig Topper; RV64-NEXT:    lui a0, 16
616ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -256
617ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
618ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 24
619ada54585SCraig Topper; RV64-NEXT:    vor.vv v9, v9, v10
620ada54585SCraig Topper; RV64-NEXT:    vsll.vi v10, v8, 8
621ada54585SCraig Topper; RV64-NEXT:    lui a0, 4080
622ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
623ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 24
624ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v10
625ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v9
626ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 4
627ada54585SCraig Topper; RV64-NEXT:    lui a0, 61681
628ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -241
629ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
630ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
631ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
632ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
633ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 2
634ada54585SCraig Topper; RV64-NEXT:    lui a0, 209715
635ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 819
636ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
637ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
638ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
639ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
640ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 1
641ada54585SCraig Topper; RV64-NEXT:    lui a0, 349525
642ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 1365
643ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
644ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
645ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
646ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
647ada54585SCraig Topper; RV64-NEXT:    ret
648ada54585SCraig Topper  %a = call <vscale x 1 x i32> @llvm.bitreverse.nxv1i32(<vscale x 1 x i32> %va)
649ada54585SCraig Topper  ret <vscale x 1 x i32> %a
650ada54585SCraig Topper}
651ada54585SCraig Topperdeclare <vscale x 1 x i32> @llvm.bitreverse.nxv1i32(<vscale x 1 x i32>)
652ada54585SCraig Topper
; Same i32 bitreverse expansion as nxv1i32 above, but at LMUL=1
; (vsetvli ... e32, m1): byte swap via shifts 8/24 with masks 0xFF00 and
; 0xFF0000, then nibble / 2-bit / 1-bit swaps with 0x0F0F0F0F, 0x33333333,
; 0x55555555.  Scratch stays in single registers v9/v10.
; NOTE(review): autogenerated checks — regenerate, do not hand-edit.
653ada54585SCraig Topperdefine <vscale x 2 x i32> @bitreverse_nxv2i32(<vscale x 2 x i32> %va) {
654ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv2i32:
655ada54585SCraig Topper; RV32:       # %bb.0:
656ada54585SCraig Topper; RV32-NEXT:    vsetvli a0, zero, e32, m1, ta, mu
657ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 8
658ada54585SCraig Topper; RV32-NEXT:    lui a0, 16
659ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -256
660ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
661ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 24
662ada54585SCraig Topper; RV32-NEXT:    vor.vv v9, v9, v10
663ada54585SCraig Topper; RV32-NEXT:    vsll.vi v10, v8, 8
664ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
665ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
666ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
667ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v10
668ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v9
669ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 4
670ada54585SCraig Topper; RV32-NEXT:    lui a0, 61681
671ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -241
672ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
673ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
674ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
675ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
676ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 2
677ada54585SCraig Topper; RV32-NEXT:    lui a0, 209715
678ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 819
679ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
680ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
681ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
682ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
683ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 1
684ada54585SCraig Topper; RV32-NEXT:    lui a0, 349525
685ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 1365
686ada54585SCraig Topper; RV32-NEXT:    vand.vx v9, v9, a0
687ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
688ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
689ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
690ada54585SCraig Topper; RV32-NEXT:    ret
691ada54585SCraig Topper;
692ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv2i32:
693ada54585SCraig Topper; RV64:       # %bb.0:
694ada54585SCraig Topper; RV64-NEXT:    vsetvli a0, zero, e32, m1, ta, mu
695ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 8
696ada54585SCraig Topper; RV64-NEXT:    lui a0, 16
697ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -256
698ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
699ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 24
700ada54585SCraig Topper; RV64-NEXT:    vor.vv v9, v9, v10
701ada54585SCraig Topper; RV64-NEXT:    vsll.vi v10, v8, 8
702ada54585SCraig Topper; RV64-NEXT:    lui a0, 4080
703ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
704ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 24
705ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v10
706ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v9
707ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 4
708ada54585SCraig Topper; RV64-NEXT:    lui a0, 61681
709ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -241
710ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
711ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
712ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
713ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
714ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 2
715ada54585SCraig Topper; RV64-NEXT:    lui a0, 209715
716ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 819
717ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
718ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
719ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
720ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
721ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 1
722ada54585SCraig Topper; RV64-NEXT:    lui a0, 349525
723ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 1365
724ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
725ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
726ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
727ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
728ada54585SCraig Topper; RV64-NEXT:    ret
729ada54585SCraig Topper  %a = call <vscale x 2 x i32> @llvm.bitreverse.nxv2i32(<vscale x 2 x i32> %va)
730ada54585SCraig Topper  ret <vscale x 2 x i32> %a
731ada54585SCraig Topper}
732ada54585SCraig Topperdeclare <vscale x 2 x i32> @llvm.bitreverse.nxv2i32(<vscale x 2 x i32>)
733ada54585SCraig Topper
; i32 bitreverse at LMUL=2 (vsetvli ... e32, m2): identical mask sequence to
; the smaller i32 cases, with scratch moved to the even register groups
; v10/v12 as required by the two-register grouping.
; NOTE(review): autogenerated checks — regenerate, do not hand-edit.
734ada54585SCraig Topperdefine <vscale x 4 x i32> @bitreverse_nxv4i32(<vscale x 4 x i32> %va) {
735ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv4i32:
736ada54585SCraig Topper; RV32:       # %bb.0:
737ada54585SCraig Topper; RV32-NEXT:    vsetvli a0, zero, e32, m2, ta, mu
738ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 8
739ada54585SCraig Topper; RV32-NEXT:    lui a0, 16
740ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -256
741ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
742ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 24
743ada54585SCraig Topper; RV32-NEXT:    vor.vv v10, v10, v12
744ada54585SCraig Topper; RV32-NEXT:    vsll.vi v12, v8, 8
745ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
746ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a0
747ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
748ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v12
749ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v10
750ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 4
751ada54585SCraig Topper; RV32-NEXT:    lui a0, 61681
752ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -241
753ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
754ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
755ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
756ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
757ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 2
758ada54585SCraig Topper; RV32-NEXT:    lui a0, 209715
759ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 819
760ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
761ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
762ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
763ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
764ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 1
765ada54585SCraig Topper; RV32-NEXT:    lui a0, 349525
766ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 1365
767ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a0
768ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
769ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
770ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
771ada54585SCraig Topper; RV32-NEXT:    ret
772ada54585SCraig Topper;
773ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv4i32:
774ada54585SCraig Topper; RV64:       # %bb.0:
775ada54585SCraig Topper; RV64-NEXT:    vsetvli a0, zero, e32, m2, ta, mu
776ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 8
777ada54585SCraig Topper; RV64-NEXT:    lui a0, 16
778ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -256
779ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
780ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 24
781ada54585SCraig Topper; RV64-NEXT:    vor.vv v10, v10, v12
782ada54585SCraig Topper; RV64-NEXT:    vsll.vi v12, v8, 8
783ada54585SCraig Topper; RV64-NEXT:    lui a0, 4080
784ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
785ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 24
786ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v12
787ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v10
788ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 4
789ada54585SCraig Topper; RV64-NEXT:    lui a0, 61681
790ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -241
791ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
792ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
793ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
794ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
795ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 2
796ada54585SCraig Topper; RV64-NEXT:    lui a0, 209715
797ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 819
798ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
799ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
800ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
801ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
802ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 1
803ada54585SCraig Topper; RV64-NEXT:    lui a0, 349525
804ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 1365
805ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
806ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
807ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
808ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
809ada54585SCraig Topper; RV64-NEXT:    ret
810ada54585SCraig Topper  %a = call <vscale x 4 x i32> @llvm.bitreverse.nxv4i32(<vscale x 4 x i32> %va)
811ada54585SCraig Topper  ret <vscale x 4 x i32> %a
812ada54585SCraig Topper}
813ada54585SCraig Topperdeclare <vscale x 4 x i32> @llvm.bitreverse.nxv4i32(<vscale x 4 x i32>)
814ada54585SCraig Topper
; i32 bitreverse at LMUL=4 (vsetvli ... e32, m4): same mask sequence, scratch
; in the four-register groups v12/v16.
; NOTE(review): autogenerated checks — regenerate, do not hand-edit.
815ada54585SCraig Topperdefine <vscale x 8 x i32> @bitreverse_nxv8i32(<vscale x 8 x i32> %va) {
816ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv8i32:
817ada54585SCraig Topper; RV32:       # %bb.0:
818ada54585SCraig Topper; RV32-NEXT:    vsetvli a0, zero, e32, m4, ta, mu
819ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 8
820ada54585SCraig Topper; RV32-NEXT:    lui a0, 16
821ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -256
822ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a0
823ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 24
824ada54585SCraig Topper; RV32-NEXT:    vor.vv v12, v12, v16
825ada54585SCraig Topper; RV32-NEXT:    vsll.vi v16, v8, 8
826ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
827ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a0
828ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
829ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v16
830ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v12
831ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 4
832ada54585SCraig Topper; RV32-NEXT:    lui a0, 61681
833ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -241
834ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a0
835ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
836ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
837ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
838ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 2
839ada54585SCraig Topper; RV32-NEXT:    lui a0, 209715
840ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 819
841ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a0
842ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
843ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
844ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
845ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 1
846ada54585SCraig Topper; RV32-NEXT:    lui a0, 349525
847ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 1365
848ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a0
849ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
850ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
851ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
852ada54585SCraig Topper; RV32-NEXT:    ret
853ada54585SCraig Topper;
854ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv8i32:
855ada54585SCraig Topper; RV64:       # %bb.0:
856ada54585SCraig Topper; RV64-NEXT:    vsetvli a0, zero, e32, m4, ta, mu
857ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 8
858ada54585SCraig Topper; RV64-NEXT:    lui a0, 16
859ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -256
860ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
861ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 24
862ada54585SCraig Topper; RV64-NEXT:    vor.vv v12, v12, v16
863ada54585SCraig Topper; RV64-NEXT:    vsll.vi v16, v8, 8
864ada54585SCraig Topper; RV64-NEXT:    lui a0, 4080
865ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
866ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 24
867ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v16
868ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v12
869ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 4
870ada54585SCraig Topper; RV64-NEXT:    lui a0, 61681
871ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -241
872ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
873ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
874ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
875ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
876ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 2
877ada54585SCraig Topper; RV64-NEXT:    lui a0, 209715
878ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 819
879ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
880ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
881ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
882ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
883ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 1
884ada54585SCraig Topper; RV64-NEXT:    lui a0, 349525
885ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 1365
886ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
887ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
888ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
889ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
890ada54585SCraig Topper; RV64-NEXT:    ret
891ada54585SCraig Topper  %a = call <vscale x 8 x i32> @llvm.bitreverse.nxv8i32(<vscale x 8 x i32> %va)
892ada54585SCraig Topper  ret <vscale x 8 x i32> %a
893ada54585SCraig Topper}
894ada54585SCraig Topperdeclare <vscale x 8 x i32> @llvm.bitreverse.nxv8i32(<vscale x 8 x i32>)
895ada54585SCraig Topper
; i32 bitreverse at LMUL=8 (vsetvli ... e32, m8): same mask sequence; with
; eight-register grouping only v8/v16/v24 remain, so scratch uses v16/v24.
; NOTE(review): autogenerated checks — regenerate, do not hand-edit.
896ada54585SCraig Topperdefine <vscale x 16 x i32> @bitreverse_nxv16i32(<vscale x 16 x i32> %va) {
897ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv16i32:
898ada54585SCraig Topper; RV32:       # %bb.0:
899ada54585SCraig Topper; RV32-NEXT:    vsetvli a0, zero, e32, m8, ta, mu
900ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 8
901ada54585SCraig Topper; RV32-NEXT:    lui a0, 16
902ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -256
903ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a0
904ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v24, v8, 24
905ada54585SCraig Topper; RV32-NEXT:    vor.vv v16, v16, v24
906ada54585SCraig Topper; RV32-NEXT:    vsll.vi v24, v8, 8
907ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
908ada54585SCraig Topper; RV32-NEXT:    vand.vx v24, v24, a0
909ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
910ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v24
911ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v16
912ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 4
913ada54585SCraig Topper; RV32-NEXT:    lui a0, 61681
914ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, -241
915ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a0
916ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
917ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
918ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v16, v8
919ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 2
920ada54585SCraig Topper; RV32-NEXT:    lui a0, 209715
921ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 819
922ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a0
923ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
924ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
925ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v16, v8
926ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 1
927ada54585SCraig Topper; RV32-NEXT:    lui a0, 349525
928ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 1365
929ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a0
930ada54585SCraig Topper; RV32-NEXT:    vand.vx v8, v8, a0
931ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
932ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v16, v8
933ada54585SCraig Topper; RV32-NEXT:    ret
934ada54585SCraig Topper;
935ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv16i32:
936ada54585SCraig Topper; RV64:       # %bb.0:
937ada54585SCraig Topper; RV64-NEXT:    vsetvli a0, zero, e32, m8, ta, mu
938ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 8
939ada54585SCraig Topper; RV64-NEXT:    lui a0, 16
940ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -256
941ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
942ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v24, v8, 24
943ada54585SCraig Topper; RV64-NEXT:    vor.vv v16, v16, v24
944ada54585SCraig Topper; RV64-NEXT:    vsll.vi v24, v8, 8
945ada54585SCraig Topper; RV64-NEXT:    lui a0, 4080
946ada54585SCraig Topper; RV64-NEXT:    vand.vx v24, v24, a0
947ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 24
948ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v24
949ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v16
950ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 4
951ada54585SCraig Topper; RV64-NEXT:    lui a0, 61681
952ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, -241
953ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
954ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
955ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
956ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
957ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 2
958ada54585SCraig Topper; RV64-NEXT:    lui a0, 209715
959ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 819
960ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
961ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
962ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
963ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
964ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 1
965ada54585SCraig Topper; RV64-NEXT:    lui a0, 349525
966ada54585SCraig Topper; RV64-NEXT:    addiw a0, a0, 1365
967ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
968ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
969ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
970ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
971ada54585SCraig Topper; RV64-NEXT:    ret
972ada54585SCraig Topper  %a = call <vscale x 16 x i32> @llvm.bitreverse.nxv16i32(<vscale x 16 x i32> %va)
973ada54585SCraig Topper  ret <vscale x 16 x i32> %a
974ada54585SCraig Topper}
975ada54585SCraig Topperdeclare <vscale x 16 x i32> @llvm.bitreverse.nxv16i32(<vscale x 16 x i32>)
976ada54585SCraig Topper
; Bitreverse of <vscale x 1 x i64>.  rv32 has no 64-bit scalar registers, so
; each 64-bit vector mask is assembled as a pair of 32-bit stores into the
; 8(sp)/12(sp) stack slots and splatted with vlse64.v (stride zero); the
; 56/40-bit vsrl/vsll.vx pairs plus the 24/8-bit shifts form the byte swap,
; followed by the usual 4/2/1-bit swap rounds with vector-mask operands.
; rv64 instead builds the byte-swap masks from li 255 shifted by 24/32/40/48
; and loads the three swap masks from the constant pool (.LCPI18_0..2).
; NOTE(review): on rv32 several mask stores reuse the same 8(sp)/12(sp)
; slots ahead of the vlse64.v loads — verify against fresh llc output when
; regenerating these checks with utils/update_llc_test_checks.py.
977ada54585SCraig Topperdefine <vscale x 1 x i64> @bitreverse_nxv1i64(<vscale x 1 x i64> %va) {
978ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv1i64:
979ada54585SCraig Topper; RV32:       # %bb.0:
980ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, -16
981ada54585SCraig Topper; RV32-NEXT:    .cfi_def_cfa_offset 16
982ada54585SCraig Topper; RV32-NEXT:    sw zero, 12(sp)
983ada54585SCraig Topper; RV32-NEXT:    lui a0, 1044480
984ada54585SCraig Topper; RV32-NEXT:    sw a0, 8(sp)
985ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
986ada54585SCraig Topper; RV32-NEXT:    sw a0, 12(sp)
987ada54585SCraig Topper; RV32-NEXT:    sw zero, 8(sp)
988af0ecfccSwangpc; RV32-NEXT:    li a1, 255
989ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
990ada54585SCraig Topper; RV32-NEXT:    lui a1, 16
991ada54585SCraig Topper; RV32-NEXT:    addi a1, a1, -256
992ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
993ada54585SCraig Topper; RV32-NEXT:    lui a2, 61681
994ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, -241
995ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
996ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
997ada54585SCraig Topper; RV32-NEXT:    lui a2, 209715
998ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 819
999ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1000ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1001ada54585SCraig Topper; RV32-NEXT:    lui a2, 349525
1002ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 1365
1003ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1004ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1005af0ecfccSwangpc; RV32-NEXT:    li a2, 56
1006ada54585SCraig Topper; RV32-NEXT:    vsetvli a3, zero, e64, m1, ta, mu
1007ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v9, v8, a2
1008af0ecfccSwangpc; RV32-NEXT:    li a3, 40
1009ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v10, v8, a3
1010ada54585SCraig Topper; RV32-NEXT:    vand.vx v10, v10, a1
1011ada54585SCraig Topper; RV32-NEXT:    vor.vv v9, v10, v9
1012ada54585SCraig Topper; RV32-NEXT:    addi a1, sp, 8
1013ada54585SCraig Topper; RV32-NEXT:    vlse64.v v10, (a1), zero
1014ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v11, v8, 24
1015ada54585SCraig Topper; RV32-NEXT:    vand.vx v11, v11, a0
1016ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 8
1017ada54585SCraig Topper; RV32-NEXT:    vand.vv v10, v12, v10
1018ada54585SCraig Topper; RV32-NEXT:    vor.vv v10, v10, v11
1019*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v11, (a1), zero
1020ada54585SCraig Topper; RV32-NEXT:    vor.vv v9, v10, v9
1021ada54585SCraig Topper; RV32-NEXT:    vsll.vx v10, v8, a2
1022ada54585SCraig Topper; RV32-NEXT:    vsll.vx v12, v8, a3
1023ada54585SCraig Topper; RV32-NEXT:    vand.vv v11, v12, v11
1024*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v12, (a1), zero
1025ada54585SCraig Topper; RV32-NEXT:    vor.vv v10, v10, v11
1026*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v11, (a1), zero
1027ada54585SCraig Topper; RV32-NEXT:    vsll.vi v13, v8, 8
1028ada54585SCraig Topper; RV32-NEXT:    vand.vv v12, v13, v12
1029ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
1030ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v11
1031ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v12
1032*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v11, (a1), zero
1033ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
1034ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v9
1035ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 4
1036ada54585SCraig Topper; RV32-NEXT:    vand.vv v9, v9, v11
1037ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v11
1038*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v10, (a1), zero
1039ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
1040ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
1041ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 2
1042ada54585SCraig Topper; RV32-NEXT:    vand.vv v9, v9, v10
1043ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v10
1044*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v10, (a1), zero
1045ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
1046ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
1047ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v9, v8, 1
1048ada54585SCraig Topper; RV32-NEXT:    vand.vv v9, v9, v10
1049ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v10
1050ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
1051ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v9, v8
1052ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, 16
1053ada54585SCraig Topper; RV32-NEXT:    ret
1054ada54585SCraig Topper;
1055ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv1i64:
1056ada54585SCraig Topper; RV64:       # %bb.0:
1057af0ecfccSwangpc; RV64-NEXT:    li a0, 56
1058ada54585SCraig Topper; RV64-NEXT:    vsetvli a1, zero, e64, m1, ta, mu
1059ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v9, v8, a0
1060af0ecfccSwangpc; RV64-NEXT:    li a1, 40
1061ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v10, v8, a1
1062ada54585SCraig Topper; RV64-NEXT:    lui a2, 16
1063ada54585SCraig Topper; RV64-NEXT:    addiw a2, a2, -256
1064ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a2
1065ada54585SCraig Topper; RV64-NEXT:    vor.vv v9, v10, v9
1066ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 24
1067ada54585SCraig Topper; RV64-NEXT:    lui a2, 4080
1068ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a2
1069ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v11, v8, 8
1070af0ecfccSwangpc; RV64-NEXT:    li a2, 255
1071ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 24
1072ada54585SCraig Topper; RV64-NEXT:    vand.vx v11, v11, a3
1073ada54585SCraig Topper; RV64-NEXT:    vor.vv v10, v11, v10
1074ada54585SCraig Topper; RV64-NEXT:    vor.vv v9, v10, v9
1075ada54585SCraig Topper; RV64-NEXT:    vsll.vi v10, v8, 8
1076ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 32
1077ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a3
1078ada54585SCraig Topper; RV64-NEXT:    vsll.vi v11, v8, 24
1079ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 40
1080ada54585SCraig Topper; RV64-NEXT:    vand.vx v11, v11, a3
1081ada54585SCraig Topper; RV64-NEXT:    vor.vv v10, v11, v10
1082ada54585SCraig Topper; RV64-NEXT:    vsll.vx v11, v8, a0
1083ada54585SCraig Topper; RV64-NEXT:    vsll.vx v8, v8, a1
1084ada54585SCraig Topper; RV64-NEXT:    slli a0, a2, 48
1085ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1086ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v11, v8
108741454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI18_0)
108841454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI18_0)(a0)
1089ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v10
1090ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v9
1091ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 4
1092ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
1093ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
109441454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI18_1)
109541454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI18_1)(a0)
1096ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
1097ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
1098ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 2
1099ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
1100ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
110141454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI18_2)
110241454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI18_2)(a0)
1103ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
1104ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
1105ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v9, v8, 1
1106ada54585SCraig Topper; RV64-NEXT:    vand.vx v9, v9, a0
1107ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1108ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
1109ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v9, v8
1110ada54585SCraig Topper; RV64-NEXT:    ret
1111ada54585SCraig Topper  %a = call <vscale x 1 x i64> @llvm.bitreverse.nxv1i64(<vscale x 1 x i64> %va)
1112ada54585SCraig Topper  ret <vscale x 1 x i64> %a
1113ada54585SCraig Topper}
1114ada54585SCraig Topperdeclare <vscale x 1 x i64> @llvm.bitreverse.nxv1i64(<vscale x 1 x i64>)
1115ada54585SCraig Topper
1116ada54585SCraig Topperdefine <vscale x 2 x i64> @bitreverse_nxv2i64(<vscale x 2 x i64> %va) {
1117ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv2i64:
1118ada54585SCraig Topper; RV32:       # %bb.0:
1119ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, -16
1120ada54585SCraig Topper; RV32-NEXT:    .cfi_def_cfa_offset 16
1121ada54585SCraig Topper; RV32-NEXT:    sw zero, 12(sp)
1122ada54585SCraig Topper; RV32-NEXT:    lui a0, 1044480
1123ada54585SCraig Topper; RV32-NEXT:    sw a0, 8(sp)
1124ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
1125ada54585SCraig Topper; RV32-NEXT:    sw a0, 12(sp)
1126ada54585SCraig Topper; RV32-NEXT:    sw zero, 8(sp)
1127af0ecfccSwangpc; RV32-NEXT:    li a1, 255
1128ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1129ada54585SCraig Topper; RV32-NEXT:    lui a1, 16
1130ada54585SCraig Topper; RV32-NEXT:    addi a1, a1, -256
1131ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1132ada54585SCraig Topper; RV32-NEXT:    lui a2, 61681
1133ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, -241
1134ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1135ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1136ada54585SCraig Topper; RV32-NEXT:    lui a2, 209715
1137ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 819
1138ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1139ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1140ada54585SCraig Topper; RV32-NEXT:    lui a2, 349525
1141ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 1365
1142ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1143ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1144af0ecfccSwangpc; RV32-NEXT:    li a2, 56
1145ada54585SCraig Topper; RV32-NEXT:    vsetvli a3, zero, e64, m2, ta, mu
1146ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v10, v8, a2
1147af0ecfccSwangpc; RV32-NEXT:    li a3, 40
1148ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v12, v8, a3
1149ada54585SCraig Topper; RV32-NEXT:    vand.vx v12, v12, a1
1150ada54585SCraig Topper; RV32-NEXT:    vor.vv v10, v12, v10
1151ada54585SCraig Topper; RV32-NEXT:    addi a1, sp, 8
1152ada54585SCraig Topper; RV32-NEXT:    vlse64.v v12, (a1), zero
1153ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v14, v8, 24
1154ada54585SCraig Topper; RV32-NEXT:    vand.vx v14, v14, a0
1155ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v16, v8, 8
1156ada54585SCraig Topper; RV32-NEXT:    vand.vv v12, v16, v12
1157ada54585SCraig Topper; RV32-NEXT:    vor.vv v12, v12, v14
1158*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v14, (a1), zero
1159ada54585SCraig Topper; RV32-NEXT:    vor.vv v10, v12, v10
1160ada54585SCraig Topper; RV32-NEXT:    vsll.vx v12, v8, a2
1161ada54585SCraig Topper; RV32-NEXT:    vsll.vx v16, v8, a3
1162ada54585SCraig Topper; RV32-NEXT:    vand.vv v14, v16, v14
1163*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1164ada54585SCraig Topper; RV32-NEXT:    vor.vv v12, v12, v14
1165*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v14, (a1), zero
1166ada54585SCraig Topper; RV32-NEXT:    vsll.vi v18, v8, 8
1167ada54585SCraig Topper; RV32-NEXT:    vand.vv v16, v18, v16
1168ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
1169ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v14
1170ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v16
1171*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v14, (a1), zero
1172ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
1173ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v10
1174ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 4
1175ada54585SCraig Topper; RV32-NEXT:    vand.vv v10, v10, v14
1176ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v14
1177*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v12, (a1), zero
1178ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
1179ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
1180ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 2
1181ada54585SCraig Topper; RV32-NEXT:    vand.vv v10, v10, v12
1182ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v12
1183*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v12, (a1), zero
1184ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
1185ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
1186ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v10, v8, 1
1187ada54585SCraig Topper; RV32-NEXT:    vand.vv v10, v10, v12
1188ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v12
1189ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
1190ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v10, v8
1191ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, 16
1192ada54585SCraig Topper; RV32-NEXT:    ret
1193ada54585SCraig Topper;
1194ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv2i64:
1195ada54585SCraig Topper; RV64:       # %bb.0:
1196af0ecfccSwangpc; RV64-NEXT:    li a0, 56
1197ada54585SCraig Topper; RV64-NEXT:    vsetvli a1, zero, e64, m2, ta, mu
1198ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v10, v8, a0
1199af0ecfccSwangpc; RV64-NEXT:    li a1, 40
1200ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v12, v8, a1
1201ada54585SCraig Topper; RV64-NEXT:    lui a2, 16
1202ada54585SCraig Topper; RV64-NEXT:    addiw a2, a2, -256
1203ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a2
1204ada54585SCraig Topper; RV64-NEXT:    vor.vv v10, v12, v10
1205ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 24
1206ada54585SCraig Topper; RV64-NEXT:    lui a2, 4080
1207ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a2
1208ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v14, v8, 8
1209af0ecfccSwangpc; RV64-NEXT:    li a2, 255
1210ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 24
1211ada54585SCraig Topper; RV64-NEXT:    vand.vx v14, v14, a3
1212ada54585SCraig Topper; RV64-NEXT:    vor.vv v12, v14, v12
1213ada54585SCraig Topper; RV64-NEXT:    vor.vv v10, v12, v10
1214ada54585SCraig Topper; RV64-NEXT:    vsll.vi v12, v8, 8
1215ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 32
1216ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a3
1217ada54585SCraig Topper; RV64-NEXT:    vsll.vi v14, v8, 24
1218ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 40
1219ada54585SCraig Topper; RV64-NEXT:    vand.vx v14, v14, a3
1220ada54585SCraig Topper; RV64-NEXT:    vor.vv v12, v14, v12
1221ada54585SCraig Topper; RV64-NEXT:    vsll.vx v14, v8, a0
1222ada54585SCraig Topper; RV64-NEXT:    vsll.vx v8, v8, a1
1223ada54585SCraig Topper; RV64-NEXT:    slli a0, a2, 48
1224ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1225ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v14, v8
122641454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI19_0)
122741454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI19_0)(a0)
1228ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v12
1229ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v10
1230ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 4
1231ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
1232ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
123341454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI19_1)
123441454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI19_1)(a0)
1235ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
1236ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
1237ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 2
1238ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
1239ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
124041454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI19_2)
124141454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI19_2)(a0)
1242ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
1243ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
1244ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v10, v8, 1
1245ada54585SCraig Topper; RV64-NEXT:    vand.vx v10, v10, a0
1246ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1247ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
1248ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v10, v8
1249ada54585SCraig Topper; RV64-NEXT:    ret
1250ada54585SCraig Topper  %a = call <vscale x 2 x i64> @llvm.bitreverse.nxv2i64(<vscale x 2 x i64> %va)
1251ada54585SCraig Topper  ret <vscale x 2 x i64> %a
1252ada54585SCraig Topper}
1253ada54585SCraig Topperdeclare <vscale x 2 x i64> @llvm.bitreverse.nxv2i64(<vscale x 2 x i64>)
1254ada54585SCraig Topper
1255ada54585SCraig Topperdefine <vscale x 4 x i64> @bitreverse_nxv4i64(<vscale x 4 x i64> %va) {
1256ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv4i64:
1257ada54585SCraig Topper; RV32:       # %bb.0:
1258ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, -16
1259ada54585SCraig Topper; RV32-NEXT:    .cfi_def_cfa_offset 16
1260ada54585SCraig Topper; RV32-NEXT:    sw zero, 12(sp)
1261ada54585SCraig Topper; RV32-NEXT:    lui a0, 1044480
1262ada54585SCraig Topper; RV32-NEXT:    sw a0, 8(sp)
1263ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
1264ada54585SCraig Topper; RV32-NEXT:    sw a0, 12(sp)
1265ada54585SCraig Topper; RV32-NEXT:    sw zero, 8(sp)
1266af0ecfccSwangpc; RV32-NEXT:    li a1, 255
1267ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1268ada54585SCraig Topper; RV32-NEXT:    lui a1, 16
1269ada54585SCraig Topper; RV32-NEXT:    addi a1, a1, -256
1270ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1271ada54585SCraig Topper; RV32-NEXT:    lui a2, 61681
1272ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, -241
1273ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1274ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1275ada54585SCraig Topper; RV32-NEXT:    lui a2, 209715
1276ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 819
1277ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1278ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1279ada54585SCraig Topper; RV32-NEXT:    lui a2, 349525
1280ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 1365
1281ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1282ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1283af0ecfccSwangpc; RV32-NEXT:    li a2, 56
1284ada54585SCraig Topper; RV32-NEXT:    vsetvli a3, zero, e64, m4, ta, mu
1285ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v12, v8, a2
1286af0ecfccSwangpc; RV32-NEXT:    li a3, 40
1287ada54585SCraig Topper; RV32-NEXT:    vsrl.vx v16, v8, a3
1288ada54585SCraig Topper; RV32-NEXT:    vand.vx v16, v16, a1
1289ada54585SCraig Topper; RV32-NEXT:    vor.vv v12, v16, v12
1290ada54585SCraig Topper; RV32-NEXT:    addi a1, sp, 8
1291ada54585SCraig Topper; RV32-NEXT:    vlse64.v v16, (a1), zero
1292ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v20, v8, 24
1293ada54585SCraig Topper; RV32-NEXT:    vand.vx v20, v20, a0
1294ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v24, v8, 8
1295ada54585SCraig Topper; RV32-NEXT:    vand.vv v16, v24, v16
1296ada54585SCraig Topper; RV32-NEXT:    vor.vv v16, v16, v20
1297*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v20, (a1), zero
1298ada54585SCraig Topper; RV32-NEXT:    vor.vv v12, v16, v12
1299ada54585SCraig Topper; RV32-NEXT:    vsll.vx v16, v8, a2
1300ada54585SCraig Topper; RV32-NEXT:    vsll.vx v24, v8, a3
1301ada54585SCraig Topper; RV32-NEXT:    vand.vv v20, v24, v20
1302*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v24, (a1), zero
1303ada54585SCraig Topper; RV32-NEXT:    vor.vv v16, v16, v20
1304*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v20, (a1), zero
1305ada54585SCraig Topper; RV32-NEXT:    vsll.vi v28, v8, 8
1306ada54585SCraig Topper; RV32-NEXT:    vand.vv v24, v28, v24
1307ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
1308ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v20
1309ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v24
1310*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v20, (a1), zero
1311ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v16, v8
1312ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v12
1313ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 4
1314ada54585SCraig Topper; RV32-NEXT:    vand.vv v12, v12, v20
1315ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v20
1316*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1317ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
1318ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
1319ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 2
1320ada54585SCraig Topper; RV32-NEXT:    vand.vv v12, v12, v16
1321ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1322*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1323ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
1324ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
1325ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v12, v8, 1
1326ada54585SCraig Topper; RV32-NEXT:    vand.vv v12, v12, v16
1327ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1328ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
1329ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v12, v8
1330ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, 16
1331ada54585SCraig Topper; RV32-NEXT:    ret
1332ada54585SCraig Topper;
1333ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv4i64:
1334ada54585SCraig Topper; RV64:       # %bb.0:
1335af0ecfccSwangpc; RV64-NEXT:    li a0, 56
1336ada54585SCraig Topper; RV64-NEXT:    vsetvli a1, zero, e64, m4, ta, mu
1337ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v12, v8, a0
1338af0ecfccSwangpc; RV64-NEXT:    li a1, 40
1339ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v16, v8, a1
1340ada54585SCraig Topper; RV64-NEXT:    lui a2, 16
1341ada54585SCraig Topper; RV64-NEXT:    addiw a2, a2, -256
1342ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a2
1343ada54585SCraig Topper; RV64-NEXT:    vor.vv v12, v16, v12
1344ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 24
1345ada54585SCraig Topper; RV64-NEXT:    lui a2, 4080
1346ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a2
1347ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v20, v8, 8
1348af0ecfccSwangpc; RV64-NEXT:    li a2, 255
1349ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 24
1350ada54585SCraig Topper; RV64-NEXT:    vand.vx v20, v20, a3
1351ada54585SCraig Topper; RV64-NEXT:    vor.vv v16, v20, v16
1352ada54585SCraig Topper; RV64-NEXT:    vor.vv v12, v16, v12
1353ada54585SCraig Topper; RV64-NEXT:    vsll.vi v16, v8, 8
1354ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 32
1355ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a3
1356ada54585SCraig Topper; RV64-NEXT:    vsll.vi v20, v8, 24
1357ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 40
1358ada54585SCraig Topper; RV64-NEXT:    vand.vx v20, v20, a3
1359ada54585SCraig Topper; RV64-NEXT:    vor.vv v16, v20, v16
1360ada54585SCraig Topper; RV64-NEXT:    vsll.vx v20, v8, a0
1361ada54585SCraig Topper; RV64-NEXT:    vsll.vx v8, v8, a1
1362ada54585SCraig Topper; RV64-NEXT:    slli a0, a2, 48
1363ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1364ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v20, v8
136541454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI20_0)
136641454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI20_0)(a0)
1367ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v16
1368ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v12
1369ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 4
1370ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
1371ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
137241454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI20_1)
137341454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI20_1)(a0)
1374ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
1375ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
1376ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 2
1377ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
1378ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
137941454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI20_2)
138041454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI20_2)(a0)
1381ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
1382ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
1383ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v12, v8, 1
1384ada54585SCraig Topper; RV64-NEXT:    vand.vx v12, v12, a0
1385ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1386ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
1387ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v12, v8
1388ada54585SCraig Topper; RV64-NEXT:    ret
1389ada54585SCraig Topper  %a = call <vscale x 4 x i64> @llvm.bitreverse.nxv4i64(<vscale x 4 x i64> %va)
1390ada54585SCraig Topper  ret <vscale x 4 x i64> %a
1391ada54585SCraig Topper}
1392ada54585SCraig Topperdeclare <vscale x 4 x i64> @llvm.bitreverse.nxv4i64(<vscale x 4 x i64>)
1393ada54585SCraig Topper
1394ada54585SCraig Topperdefine <vscale x 8 x i64> @bitreverse_nxv8i64(<vscale x 8 x i64> %va) {
1395ada54585SCraig Topper; RV32-LABEL: bitreverse_nxv8i64:
1396ada54585SCraig Topper; RV32:       # %bb.0:
1397ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, -16
1398ada54585SCraig Topper; RV32-NEXT:    .cfi_def_cfa_offset 16
1399ada54585SCraig Topper; RV32-NEXT:    csrr a0, vlenb
1400ada54585SCraig Topper; RV32-NEXT:    slli a0, a0, 4
1401ada54585SCraig Topper; RV32-NEXT:    sub sp, sp, a0
1402ada54585SCraig Topper; RV32-NEXT:    sw zero, 12(sp)
1403ada54585SCraig Topper; RV32-NEXT:    lui a0, 1044480
1404ada54585SCraig Topper; RV32-NEXT:    sw a0, 8(sp)
1405ada54585SCraig Topper; RV32-NEXT:    lui a0, 4080
1406ada54585SCraig Topper; RV32-NEXT:    sw a0, 12(sp)
1407ada54585SCraig Topper; RV32-NEXT:    sw zero, 8(sp)
1408af0ecfccSwangpc; RV32-NEXT:    li a1, 255
1409ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1410ada54585SCraig Topper; RV32-NEXT:    lui a1, 16
1411ada54585SCraig Topper; RV32-NEXT:    addi a1, a1, -256
1412ada54585SCraig Topper; RV32-NEXT:    sw a1, 12(sp)
1413ada54585SCraig Topper; RV32-NEXT:    lui a2, 61681
1414ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, -241
1415ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1416ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1417ada54585SCraig Topper; RV32-NEXT:    lui a2, 209715
1418ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 819
1419ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1420ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1421ada54585SCraig Topper; RV32-NEXT:    lui a2, 349525
1422ada54585SCraig Topper; RV32-NEXT:    addi a2, a2, 1365
1423ada54585SCraig Topper; RV32-NEXT:    sw a2, 12(sp)
1424ada54585SCraig Topper; RV32-NEXT:    sw a2, 8(sp)
1425af0ecfccSwangpc; RV32-NEXT:    li a2, 56
1426ada54585SCraig Topper; RV32-NEXT:    vsetvli a3, zero, e64, m8, ta, mu
1427af0ecfccSwangpc; RV32-NEXT:    li a3, 40
1428*c06d0b4dSluxufan; RV32-NEXT:    vsrl.vx v16, v8, a3
1429*c06d0b4dSluxufan; RV32-NEXT:    vand.vx v16, v16, a1
1430*c06d0b4dSluxufan; RV32-NEXT:    addi a1, sp, 8
1431*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v24, (a1), zero
1432*c06d0b4dSluxufan; RV32-NEXT:    vsrl.vx v0, v8, a2
1433*c06d0b4dSluxufan; RV32-NEXT:    vor.vv v16, v16, v0
1434*c06d0b4dSluxufan; RV32-NEXT:    csrr a4, vlenb
1435*c06d0b4dSluxufan; RV32-NEXT:    slli a4, a4, 3
1436*c06d0b4dSluxufan; RV32-NEXT:    add a4, sp, a4
1437*c06d0b4dSluxufan; RV32-NEXT:    addi a4, a4, 16
1438*c06d0b4dSluxufan; RV32-NEXT:    vs8r.v v16, (a4) # Unknown-size Folded Spill
1439*c06d0b4dSluxufan; RV32-NEXT:    vsrl.vi v0, v8, 8
1440*c06d0b4dSluxufan; RV32-NEXT:    vand.vv v24, v0, v24
1441ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v0, v8, 24
1442ada54585SCraig Topper; RV32-NEXT:    vand.vx v0, v0, a0
1443*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1444ada54585SCraig Topper; RV32-NEXT:    vor.vv v24, v24, v0
1445ada54585SCraig Topper; RV32-NEXT:    csrr a0, vlenb
1446ada54585SCraig Topper; RV32-NEXT:    slli a0, a0, 3
1447ada54585SCraig Topper; RV32-NEXT:    add a0, sp, a0
1448ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 16
1449ada54585SCraig Topper; RV32-NEXT:    vl8re8.v v0, (a0) # Unknown-size Folded Reload
1450ada54585SCraig Topper; RV32-NEXT:    vor.vv v24, v24, v0
1451ada54585SCraig Topper; RV32-NEXT:    csrr a0, vlenb
1452ada54585SCraig Topper; RV32-NEXT:    slli a0, a0, 3
1453ada54585SCraig Topper; RV32-NEXT:    add a0, sp, a0
1454ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 16
1455ada54585SCraig Topper; RV32-NEXT:    vs8r.v v24, (a0) # Unknown-size Folded Spill
1456ada54585SCraig Topper; RV32-NEXT:    vsll.vx v24, v8, a3
1457ada54585SCraig Topper; RV32-NEXT:    vand.vv v16, v24, v16
1458ada54585SCraig Topper; RV32-NEXT:    vsll.vx v24, v8, a2
1459*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v0, (a1), zero
1460ada54585SCraig Topper; RV32-NEXT:    vor.vv v16, v24, v16
1461ada54585SCraig Topper; RV32-NEXT:    addi a0, sp, 16
1462ada54585SCraig Topper; RV32-NEXT:    vs8r.v v16, (a0) # Unknown-size Folded Spill
1463*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1464ada54585SCraig Topper; RV32-NEXT:    vsll.vi v24, v8, 8
1465ada54585SCraig Topper; RV32-NEXT:    vand.vv v24, v24, v0
1466ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 24
1467ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1468ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v24
1469*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1470ada54585SCraig Topper; RV32-NEXT:    addi a0, sp, 16
1471ada54585SCraig Topper; RV32-NEXT:    vl8re8.v v24, (a0) # Unknown-size Folded Reload
1472ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v24, v8
1473ada54585SCraig Topper; RV32-NEXT:    csrr a0, vlenb
1474ada54585SCraig Topper; RV32-NEXT:    slli a0, a0, 3
1475ada54585SCraig Topper; RV32-NEXT:    add a0, sp, a0
1476ada54585SCraig Topper; RV32-NEXT:    addi a0, a0, 16
1477ada54585SCraig Topper; RV32-NEXT:    vl8re8.v v24, (a0) # Unknown-size Folded Reload
1478ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v8, v24
1479ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v24, v8, 4
1480ada54585SCraig Topper; RV32-NEXT:    vand.vv v24, v24, v16
1481ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1482*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1483ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 4
1484ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v24, v8
1485ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v24, v8, 2
1486ada54585SCraig Topper; RV32-NEXT:    vand.vv v24, v24, v16
1487ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1488*c06d0b4dSluxufan; RV32-NEXT:    vlse64.v v16, (a1), zero
1489ada54585SCraig Topper; RV32-NEXT:    vsll.vi v8, v8, 2
1490ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v24, v8
1491ada54585SCraig Topper; RV32-NEXT:    vsrl.vi v24, v8, 1
1492ada54585SCraig Topper; RV32-NEXT:    vand.vv v24, v24, v16
1493ada54585SCraig Topper; RV32-NEXT:    vand.vv v8, v8, v16
1494ada54585SCraig Topper; RV32-NEXT:    vadd.vv v8, v8, v8
1495ada54585SCraig Topper; RV32-NEXT:    vor.vv v8, v24, v8
1496ada54585SCraig Topper; RV32-NEXT:    csrr a0, vlenb
1497ada54585SCraig Topper; RV32-NEXT:    slli a0, a0, 4
1498ada54585SCraig Topper; RV32-NEXT:    add sp, sp, a0
1499ada54585SCraig Topper; RV32-NEXT:    addi sp, sp, 16
1500ada54585SCraig Topper; RV32-NEXT:    ret
1501ada54585SCraig Topper;
1502ada54585SCraig Topper; RV64-LABEL: bitreverse_nxv8i64:
1503ada54585SCraig Topper; RV64:       # %bb.0:
1504af0ecfccSwangpc; RV64-NEXT:    li a0, 56
1505ada54585SCraig Topper; RV64-NEXT:    vsetvli a1, zero, e64, m8, ta, mu
1506ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v16, v8, a0
1507af0ecfccSwangpc; RV64-NEXT:    li a1, 40
1508ada54585SCraig Topper; RV64-NEXT:    vsrl.vx v24, v8, a1
1509ada54585SCraig Topper; RV64-NEXT:    lui a2, 16
1510ada54585SCraig Topper; RV64-NEXT:    addiw a2, a2, -256
1511ada54585SCraig Topper; RV64-NEXT:    vand.vx v24, v24, a2
1512ada54585SCraig Topper; RV64-NEXT:    vor.vv v16, v24, v16
1513ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v24, v8, 24
1514ada54585SCraig Topper; RV64-NEXT:    lui a2, 4080
1515ada54585SCraig Topper; RV64-NEXT:    vand.vx v24, v24, a2
1516ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v0, v8, 8
1517af0ecfccSwangpc; RV64-NEXT:    li a2, 255
1518ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 24
1519ada54585SCraig Topper; RV64-NEXT:    vand.vx v0, v0, a3
1520ada54585SCraig Topper; RV64-NEXT:    vor.vv v24, v0, v24
1521ada54585SCraig Topper; RV64-NEXT:    vor.vv v16, v24, v16
1522ada54585SCraig Topper; RV64-NEXT:    vsll.vi v24, v8, 8
1523ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 32
1524ada54585SCraig Topper; RV64-NEXT:    vand.vx v24, v24, a3
1525ada54585SCraig Topper; RV64-NEXT:    vsll.vi v0, v8, 24
1526ada54585SCraig Topper; RV64-NEXT:    slli a3, a2, 40
1527ada54585SCraig Topper; RV64-NEXT:    vand.vx v0, v0, a3
1528ada54585SCraig Topper; RV64-NEXT:    vor.vv v24, v0, v24
1529ada54585SCraig Topper; RV64-NEXT:    vsll.vx v0, v8, a0
1530ada54585SCraig Topper; RV64-NEXT:    vsll.vx v8, v8, a1
1531ada54585SCraig Topper; RV64-NEXT:    slli a0, a2, 48
1532ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1533ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v0, v8
153441454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI21_0)
153541454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI21_0)(a0)
1536ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v24
1537ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v8, v16
1538ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 4
1539ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
1540ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
154141454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI21_1)
154241454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI21_1)(a0)
1543ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 4
1544ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
1545ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 2
1546ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
1547ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
154841454ab2Swangpc; RV64-NEXT:    lui a0, %hi(.LCPI21_2)
154941454ab2Swangpc; RV64-NEXT:    ld a0, %lo(.LCPI21_2)(a0)
1550ada54585SCraig Topper; RV64-NEXT:    vsll.vi v8, v8, 2
1551ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
1552ada54585SCraig Topper; RV64-NEXT:    vsrl.vi v16, v8, 1
1553ada54585SCraig Topper; RV64-NEXT:    vand.vx v16, v16, a0
1554ada54585SCraig Topper; RV64-NEXT:    vand.vx v8, v8, a0
1555ada54585SCraig Topper; RV64-NEXT:    vadd.vv v8, v8, v8
1556ada54585SCraig Topper; RV64-NEXT:    vor.vv v8, v16, v8
1557ada54585SCraig Topper; RV64-NEXT:    ret
1558ada54585SCraig Topper  %a = call <vscale x 8 x i64> @llvm.bitreverse.nxv8i64(<vscale x 8 x i64> %va)
1559ada54585SCraig Topper  ret <vscale x 8 x i64> %a
1560ada54585SCraig Topper}
1561ada54585SCraig Topperdeclare <vscale x 8 x i64> @llvm.bitreverse.nxv8i64(<vscale x 8 x i64>)
1562