#include "ARMBaseInstrInfo.h"
#include "ARMSubtarget.h"
#include "ARMTargetMachine.h"
#include "llvm/Support/TargetRegistry.h"
#include "llvm/Support/TargetSelect.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"

#include "gtest/gtest.h"

#include <memory>
#include <set>

using namespace llvm;

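// Check that the DoubleWidthResult TSFlag is set on exactly the MVE
// instructions whose result elements are twice the width of their source
// elements (the VMULL, VQDMULL, VMOVL and VSHLL families); the tail
// predication passes consult this flag when deciding whether an instruction
// is safe inside a tail-predicated loop.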
TEST(MachineInstructionDoubleWidthResult, IsCorrect) {
  using namespace ARM;

  auto DoubleWidthResult = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMULLBp16:
    case MVE_VMULLBp8:
    case MVE_VMULLBs16:
    case MVE_VMULLBs32:
    case MVE_VMULLBs8:
    case MVE_VMULLBu16:
    case MVE_VMULLBu32:
    case MVE_VMULLBu8:
    case MVE_VMULLTp16:
    case MVE_VMULLTp8:
    case MVE_VMULLTs16:
    case MVE_VMULLTs32:
    case MVE_VMULLTs8:
    case MVE_VMULLTu16:
    case MVE_VMULLTu32:
    case MVE_VMULLTu8:
    case MVE_VQDMULL_qr_s16bh:
    case MVE_VQDMULL_qr_s16th:
    case MVE_VQDMULL_qr_s32bh:
    case MVE_VQDMULL_qr_s32th:
    case MVE_VQDMULLs16bh:
    case MVE_VQDMULLs16th:
    case MVE_VQDMULLs32bh:
    case MVE_VQDMULLs32th:
    case MVE_VMOVLs16bh:
    case MVE_VMOVLs16th:
    case MVE_VMOVLs8bh:
    case MVE_VMOVLs8th:
    case MVE_VMOVLu16bh:
    case MVE_VMOVLu16th:
    case MVE_VMOVLu8bh:
    case MVE_VMOVLu8th:
    case MVE_VSHLL_imms16bh:
    case MVE_VSHLL_imms16th:
    case MVE_VSHLL_imms8bh:
    case MVE_VSHLL_imms8th:
    case MVE_VSHLL_immu16bh:
    case MVE_VSHLL_immu16th:
    case MVE_VSHLL_immu8bh:
    case MVE_VSHLL_immu8th:
    case MVE_VSHLL_lws16bh:
    case MVE_VSHLL_lws16th:
    case MVE_VSHLL_lws8bh:
    case MVE_VSHLL_lws8th:
    case MVE_VSHLL_lwu16bh:
    case MVE_VSHLL_lwu16th:
    case MVE_VSHLL_lwu8bh:
    case MVE_VSHLL_lwu8th:
      return true;
    }
    return false;
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    bool Valid = (Flags & ARMII::DoubleWidthResult) != 0;
    ASSERT_EQ(DoubleWidthResult(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

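// Check that the HorizontalReduction TSFlag is set on exactly the MVE
// instructions that reduce across vector lanes into a scalar result or
// accumulator (VABAV, VADDV/VADDLV, the across-vector min/max forms, and the
// VMLADAV/VMLALDAV/VMLSDAV multiply-accumulate reductions).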
TEST(MachineInstructionHorizontalReduction, IsCorrect) {
  using namespace ARM;

  auto HorizontalReduction = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VABAVs16:
    case MVE_VABAVs32:
    case MVE_VABAVs8:
    case MVE_VABAVu16:
    case MVE_VABAVu32:
    case MVE_VABAVu8:
    case MVE_VADDLVs32acc:
    case MVE_VADDLVs32no_acc:
    case MVE_VADDLVu32acc:
    case MVE_VADDLVu32no_acc:
    case MVE_VADDVs16acc:
    case MVE_VADDVs16no_acc:
    case MVE_VADDVs32acc:
    case MVE_VADDVs32no_acc:
    case MVE_VADDVs8acc:
    case MVE_VADDVs8no_acc:
    case MVE_VADDVu16acc:
    case MVE_VADDVu16no_acc:
    case MVE_VADDVu32acc:
    case MVE_VADDVu32no_acc:
    case MVE_VADDVu8acc:
    case MVE_VADDVu8no_acc:
    case MVE_VMAXAVs16:
    case MVE_VMAXAVs32:
    case MVE_VMAXAVs8:
    case MVE_VMAXNMAVf16:
    case MVE_VMAXNMAVf32:
    case MVE_VMAXNMVf16:
    case MVE_VMAXNMVf32:
    case MVE_VMAXVs16:
    case MVE_VMAXVs32:
    case MVE_VMAXVs8:
    case MVE_VMAXVu16:
    case MVE_VMAXVu32:
    case MVE_VMAXVu8:
    case MVE_VMINAVs16:
    case MVE_VMINAVs32:
    case MVE_VMINAVs8:
    case MVE_VMINNMAVf16:
    case MVE_VMINNMAVf32:
    case MVE_VMINNMVf16:
    case MVE_VMINNMVf32:
    case MVE_VMINVs16:
    case MVE_VMINVs32:
    case MVE_VMINVs8:
    case MVE_VMINVu16:
    case MVE_VMINVu32:
    case MVE_VMINVu8:
    case MVE_VMLADAVas16:
    case MVE_VMLADAVas32:
    case MVE_VMLADAVas8:
    case MVE_VMLADAVau16:
    case MVE_VMLADAVau32:
    case MVE_VMLADAVau8:
    case MVE_VMLADAVaxs16:
    case MVE_VMLADAVaxs32:
    case MVE_VMLADAVaxs8:
    case MVE_VMLADAVs16:
    case MVE_VMLADAVs32:
    case MVE_VMLADAVs8:
    case MVE_VMLADAVu16:
    case MVE_VMLADAVu32:
    case MVE_VMLADAVu8:
    case MVE_VMLADAVxs16:
    case MVE_VMLADAVxs32:
    case MVE_VMLADAVxs8:
    case MVE_VMLALDAVas16:
    case MVE_VMLALDAVas32:
    case MVE_VMLALDAVau16:
    case MVE_VMLALDAVau32:
    case MVE_VMLALDAVaxs16:
    case MVE_VMLALDAVaxs32:
    case MVE_VMLALDAVs16:
    case MVE_VMLALDAVs32:
    case MVE_VMLALDAVu16:
    case MVE_VMLALDAVu32:
    case MVE_VMLALDAVxs16:
    case MVE_VMLALDAVxs32:
    case MVE_VMLSDAVas16:
    case MVE_VMLSDAVas32:
    case MVE_VMLSDAVas8:
    case MVE_VMLSDAVaxs16:
    case MVE_VMLSDAVaxs32:
    case MVE_VMLSDAVaxs8:
    case MVE_VMLSDAVs16:
    case MVE_VMLSDAVs32:
    case MVE_VMLSDAVs8:
    case MVE_VMLSDAVxs16:
    case MVE_VMLSDAVxs32:
    case MVE_VMLSDAVxs8:
    case MVE_VMLSLDAVas16:
    case MVE_VMLSLDAVas32:
    case MVE_VMLSLDAVaxs16:
    case MVE_VMLSLDAVaxs32:
    case MVE_VMLSLDAVs16:
    case MVE_VMLSLDAVs32:
    case MVE_VMLSLDAVxs16:
    case MVE_VMLSLDAVxs32:
    case MVE_VRMLALDAVHas32:
    case MVE_VRMLALDAVHau32:
    case MVE_VRMLALDAVHaxs32:
    case MVE_VRMLALDAVHs32:
    case MVE_VRMLALDAVHu32:
    case MVE_VRMLALDAVHxs32:
    case MVE_VRMLSLDAVHas32:
    case MVE_VRMLSLDAVHaxs32:
    case MVE_VRMLSLDAVHs32:
    case MVE_VRMLSLDAVHxs32:
      return true;
    }
    return false;
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;
    bool Valid = (Flags & ARMII::HorizontalReduction) != 0;
    ASSERT_EQ(HorizontalReduction(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

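// Check that the RetainsPreviousHalfElement TSFlag is set on exactly the MVE
// narrowing instructions that write only the top or bottom half of each
// destination element and leave the other half unchanged (VMOVN/VQMOVN, the
// narrowing shifts, and the f16<->f32 converts).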
TEST(MachineInstructionRetainsPreviousHalfElement, IsCorrect) {
  using namespace ARM;

  auto RetainsPreviousHalfElement = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      break;
    case MVE_VMOVNi16bh:
    case MVE_VMOVNi16th:
    case MVE_VMOVNi32bh:
    case MVE_VMOVNi32th:
    case MVE_VQMOVNs16bh:
    case MVE_VQMOVNs16th:
    case MVE_VQMOVNs32bh:
    case MVE_VQMOVNs32th:
    case MVE_VQMOVNu16bh:
    case MVE_VQMOVNu16th:
    case MVE_VQMOVNu32bh:
    case MVE_VQMOVNu32th:
    case MVE_VQMOVUNs16bh:
    case MVE_VQMOVUNs16th:
    case MVE_VQMOVUNs32bh:
    case MVE_VQMOVUNs32th:
    case MVE_VQRSHRNbhs16:
    case MVE_VQRSHRNbhs32:
    case MVE_VQRSHRNbhu16:
    case MVE_VQRSHRNbhu32:
    case MVE_VQRSHRNths16:
    case MVE_VQRSHRNths32:
    case MVE_VQRSHRNthu16:
    case MVE_VQRSHRNthu32:
    case MVE_VQRSHRUNs16bh:
    case MVE_VQRSHRUNs16th:
    case MVE_VQRSHRUNs32bh:
    case MVE_VQRSHRUNs32th:
    case MVE_VQSHRNbhs16:
    case MVE_VQSHRNbhs32:
    case MVE_VQSHRNbhu16:
    case MVE_VQSHRNbhu32:
    case MVE_VQSHRNths16:
    case MVE_VQSHRNths32:
    case MVE_VQSHRNthu16:
    case MVE_VQSHRNthu32:
    case MVE_VQSHRUNs16bh:
    case MVE_VQSHRUNs16th:
    case MVE_VQSHRUNs32bh:
    case MVE_VQSHRUNs32th:
    case MVE_VRSHRNi16bh:
    case MVE_VRSHRNi16th:
    case MVE_VRSHRNi32bh:
    case MVE_VRSHRNi32th:
    case MVE_VSHRNi16bh:
    case MVE_VSHRNi16th:
    case MVE_VSHRNi32bh:
    case MVE_VSHRNi32th:
    case MVE_VCVTf16f32bh:
    case MVE_VCVTf16f32th:
    case MVE_VCVTf32f16bh:
    case MVE_VCVTf32f16th:
      return true;
    }
    return false;
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    const MCInstrDesc &Desc = TII->get(i);

    uint64_t Flags = Desc.TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;

    bool Valid = (Flags & ARMII::RetainsPreviousHalfElement) != 0;
    ASSERT_EQ(RetainsPreviousHalfElement(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

// Test for instructions that aren't immediately obviously valid within a
// tail-predicated loop. Whether an instruction is valid should be marked up
// in its tablegen description. Currently, we conservatively disallow:
// - cross-beat carries.
// - complex operations.
// - horizontal operations with exchange.
// - byte swapping.
// - interleaved memory instructions.
// TODO: Add to this list once we can handle them safely.
TEST(MachineInstrValidTailPredication, IsCorrect) {

  using namespace ARM;

  auto IsValidTPOpcode = [](unsigned Opcode) {
    switch (Opcode) {
    default:
      return false;
    case MVE_ASRLi:
    case MVE_ASRLr:
    case MVE_LSRL:
    case MVE_LSLLi:
    case MVE_LSLLr:
    case MVE_SQRSHR:
    case MVE_SQRSHRL:
    case MVE_SQSHL:
    case MVE_SQSHLL:
    case MVE_SRSHR:
    case MVE_SRSHRL:
    case MVE_UQRSHL:
    case MVE_UQRSHLL:
    case MVE_UQSHL:
    case MVE_UQSHLL:
    case MVE_URSHR:
    case MVE_URSHRL:
    case MVE_VABDf16:
    case MVE_VABDf32:
    case MVE_VABDs16:
    case MVE_VABDs32:
    case MVE_VABDs8:
    case MVE_VABDu16:
    case MVE_VABDu32:
    case MVE_VABDu8:
    case MVE_VABSf16:
    case MVE_VABSf32:
    case MVE_VABSs16:
    case MVE_VABSs32:
    case MVE_VABSs8:
    case MVE_VADD_qr_f16:
    case MVE_VADD_qr_f32:
    case MVE_VADD_qr_i16:
    case MVE_VADD_qr_i32:
    case MVE_VADD_qr_i8:
    case MVE_VADDVs16acc:
    case MVE_VADDVs16no_acc:
    case MVE_VADDVs32acc:
    case MVE_VADDVs32no_acc:
    case MVE_VADDVs8acc:
    case MVE_VADDVs8no_acc:
    case MVE_VADDVu16acc:
    case MVE_VADDVu16no_acc:
    case MVE_VADDVu32acc:
    case MVE_VADDVu32no_acc:
    case MVE_VADDVu8acc:
    case MVE_VADDVu8no_acc:
    case MVE_VADDf16:
    case MVE_VADDf32:
    case MVE_VADDi16:
    case MVE_VADDi32:
    case MVE_VADDi8:
    case MVE_VAND:
    case MVE_VBIC:
    case MVE_VBICimmi16:
    case MVE_VBICimmi32:
    case MVE_VBRSR16:
    case MVE_VBRSR32:
    case MVE_VBRSR8:
    case MVE_VCLSs16:
    case MVE_VCLSs32:
    case MVE_VCLSs8:
    case MVE_VCLZs16:
    case MVE_VCLZs32:
    case MVE_VCLZs8:
    case MVE_VCMPf16:
    case MVE_VCMPf16r:
    case MVE_VCMPf32:
    case MVE_VCMPf32r:
    case MVE_VCMPi16:
    case MVE_VCMPi16r:
    case MVE_VCMPi32:
    case MVE_VCMPi32r:
    case MVE_VCMPi8:
    case MVE_VCMPi8r:
    case MVE_VCMPs16:
    case MVE_VCMPs16r:
    case MVE_VCMPs32:
    case MVE_VCMPs32r:
    case MVE_VCMPs8:
    case MVE_VCMPs8r:
    case MVE_VCMPu16:
    case MVE_VCMPu16r:
    case MVE_VCMPu32:
    case MVE_VCMPu32r:
    case MVE_VCMPu8:
    case MVE_VCMPu8r:
    case MVE_VCTP16:
    case MVE_VCTP32:
    case MVE_VCTP64:
    case MVE_VCTP8:
    case MVE_VCVTf16s16_fix:
    case MVE_VCVTf16s16n:
    case MVE_VCVTf16u16_fix:
    case MVE_VCVTf16u16n:
    case MVE_VCVTf32s32_fix:
    case MVE_VCVTf32s32n:
    case MVE_VCVTf32u32_fix:
    case MVE_VCVTf32u32n:
    case MVE_VCVTs16f16_fix:
    case MVE_VCVTs16f16a:
    case MVE_VCVTs16f16m:
    case MVE_VCVTs16f16n:
    case MVE_VCVTs16f16p:
    case MVE_VCVTs16f16z:
    case MVE_VCVTs32f32_fix:
    case MVE_VCVTs32f32a:
    case MVE_VCVTs32f32m:
    case MVE_VCVTs32f32n:
    case MVE_VCVTs32f32p:
    case MVE_VCVTs32f32z:
    case MVE_VCVTu16f16_fix:
    case MVE_VCVTu16f16a:
    case MVE_VCVTu16f16m:
    case MVE_VCVTu16f16n:
    case MVE_VCVTu16f16p:
    case MVE_VCVTu16f16z:
    case MVE_VCVTu32f32_fix:
    case MVE_VCVTu32f32a:
    case MVE_VCVTu32f32m:
    case MVE_VCVTu32f32n:
    case MVE_VCVTu32f32p:
    case MVE_VCVTu32f32z:
    case MVE_VDDUPu16:
    case MVE_VDDUPu32:
    case MVE_VDDUPu8:
    case MVE_VDUP16:
    case MVE_VDUP32:
    case MVE_VDUP8:
    case MVE_VDWDUPu16:
    case MVE_VDWDUPu32:
    case MVE_VDWDUPu8:
    case MVE_VEOR:
    case MVE_VFMA_qr_Sf16:
    case MVE_VFMA_qr_Sf32:
    case MVE_VFMA_qr_f16:
    case MVE_VFMA_qr_f32:
    case MVE_VFMAf16:
    case MVE_VFMAf32:
    case MVE_VFMSf16:
    case MVE_VFMSf32:
    case MVE_VMAXAs16:
    case MVE_VMAXAs32:
    case MVE_VMAXAs8:
    case MVE_VMAXs16:
    case MVE_VMAXs32:
    case MVE_VMAXs8:
    case MVE_VMAXu16:
    case MVE_VMAXu32:
    case MVE_VMAXu8:
    case MVE_VMINAs16:
    case MVE_VMINAs32:
    case MVE_VMINAs8:
    case MVE_VMINs16:
    case MVE_VMINs32:
    case MVE_VMINs8:
    case MVE_VMINu16:
    case MVE_VMINu32:
    case MVE_VMINu8:
    case MVE_VMLADAVas16:
    case MVE_VMLADAVas32:
    case MVE_VMLADAVas8:
    case MVE_VMLADAVau16:
    case MVE_VMLADAVau32:
    case MVE_VMLADAVau8:
    case MVE_VMLADAVs16:
    case MVE_VMLADAVs32:
    case MVE_VMLADAVs8:
    case MVE_VMLADAVu16:
    case MVE_VMLADAVu32:
    case MVE_VMLADAVu8:
    case MVE_VMLALDAVs16:
    case MVE_VMLALDAVs32:
    case MVE_VMLALDAVu16:
    case MVE_VMLALDAVu32:
    case MVE_VMLALDAVas16:
    case MVE_VMLALDAVas32:
    case MVE_VMLALDAVau16:
    case MVE_VMLALDAVau32:
    case MVE_VMLSDAVas16:
    case MVE_VMLSDAVas32:
    case MVE_VMLSDAVas8:
    case MVE_VMLSDAVs16:
    case MVE_VMLSDAVs32:
    case MVE_VMLSDAVs8:
    case MVE_VMLSLDAVas16:
    case MVE_VMLSLDAVas32:
    case MVE_VMLSLDAVs16:
    case MVE_VMLSLDAVs32:
    case MVE_VRMLALDAVHas32:
    case MVE_VRMLALDAVHau32:
    case MVE_VRMLALDAVHs32:
    case MVE_VRMLALDAVHu32:
    case MVE_VRMLSLDAVHas32:
    case MVE_VRMLSLDAVHs32:
    case MVE_VMLAS_qr_s16:
    case MVE_VMLAS_qr_s32:
    case MVE_VMLAS_qr_s8:
    case MVE_VMLAS_qr_u16:
    case MVE_VMLAS_qr_u32:
    case MVE_VMLAS_qr_u8:
    case MVE_VMLA_qr_s16:
    case MVE_VMLA_qr_s32:
    case MVE_VMLA_qr_s8:
    case MVE_VMLA_qr_u16:
    case MVE_VMLA_qr_u32:
    case MVE_VMLA_qr_u8:
    case MVE_VHADD_qr_s16:
    case MVE_VHADD_qr_s32:
    case MVE_VHADD_qr_s8:
    case MVE_VHADD_qr_u16:
    case MVE_VHADD_qr_u32:
    case MVE_VHADD_qr_u8:
    case MVE_VHADDs16:
    case MVE_VHADDs32:
    case MVE_VHADDs8:
    case MVE_VHADDu16:
    case MVE_VHADDu32:
    case MVE_VHADDu8:
    case MVE_VHSUB_qr_s16:
    case MVE_VHSUB_qr_s32:
    case MVE_VHSUB_qr_s8:
    case MVE_VHSUB_qr_u16:
    case MVE_VHSUB_qr_u32:
    case MVE_VHSUB_qr_u8:
    case MVE_VHSUBs16:
    case MVE_VHSUBs32:
    case MVE_VHSUBs8:
    case MVE_VHSUBu16:
    case MVE_VHSUBu32:
    case MVE_VHSUBu8:
    case MVE_VIDUPu16:
    case MVE_VIDUPu32:
    case MVE_VIDUPu8:
    case MVE_VIWDUPu16:
    case MVE_VIWDUPu32:
    case MVE_VIWDUPu8:
    case MVE_VLD20_8:
    case MVE_VLD21_8:
    case MVE_VLD20_16:
    case MVE_VLD21_16:
    case MVE_VLD20_32:
    case MVE_VLD21_32:
    case MVE_VLD20_8_wb:
    case MVE_VLD21_8_wb:
    case MVE_VLD20_16_wb:
    case MVE_VLD21_16_wb:
    case MVE_VLD20_32_wb:
    case MVE_VLD21_32_wb:
    case MVE_VLD40_8:
    case MVE_VLD41_8:
    case MVE_VLD42_8:
    case MVE_VLD43_8:
    case MVE_VLD40_16:
    case MVE_VLD41_16:
    case MVE_VLD42_16:
    case MVE_VLD43_16:
    case MVE_VLD40_32:
    case MVE_VLD41_32:
    case MVE_VLD42_32:
    case MVE_VLD43_32:
    case MVE_VLD40_8_wb:
    case MVE_VLD41_8_wb:
    case MVE_VLD42_8_wb:
    case MVE_VLD43_8_wb:
    case MVE_VLD40_16_wb:
    case MVE_VLD41_16_wb:
    case MVE_VLD42_16_wb:
    case MVE_VLD43_16_wb:
    case MVE_VLD40_32_wb:
    case MVE_VLD41_32_wb:
    case MVE_VLD42_32_wb:
    case MVE_VLD43_32_wb:
    case MVE_VLDRBS16:
    case MVE_VLDRBS16_post:
    case MVE_VLDRBS16_pre:
    case MVE_VLDRBS16_rq:
    case MVE_VLDRBS32:
    case MVE_VLDRBS32_post:
    case MVE_VLDRBS32_pre:
    case MVE_VLDRBS32_rq:
    case MVE_VLDRBU16:
    case MVE_VLDRBU16_post:
    case MVE_VLDRBU16_pre:
    case MVE_VLDRBU16_rq:
    case MVE_VLDRBU32:
    case MVE_VLDRBU32_post:
    case MVE_VLDRBU32_pre:
    case MVE_VLDRBU32_rq:
    case MVE_VLDRBU8:
    case MVE_VLDRBU8_post:
    case MVE_VLDRBU8_pre:
    case MVE_VLDRBU8_rq:
    case MVE_VLDRDU64_qi:
    case MVE_VLDRDU64_qi_pre:
    case MVE_VLDRDU64_rq:
    case MVE_VLDRDU64_rq_u:
    case MVE_VLDRHS32:
    case MVE_VLDRHS32_post:
    case MVE_VLDRHS32_pre:
    case MVE_VLDRHS32_rq:
    case MVE_VLDRHS32_rq_u:
    case MVE_VLDRHU16:
    case MVE_VLDRHU16_post:
    case MVE_VLDRHU16_pre:
    case MVE_VLDRHU16_rq:
    case MVE_VLDRHU16_rq_u:
    case MVE_VLDRHU32:
    case MVE_VLDRHU32_post:
    case MVE_VLDRHU32_pre:
    case MVE_VLDRHU32_rq:
    case MVE_VLDRHU32_rq_u:
    case MVE_VLDRWU32:
    case MVE_VLDRWU32_post:
    case MVE_VLDRWU32_pre:
    case MVE_VLDRWU32_qi:
    case MVE_VLDRWU32_qi_pre:
    case MVE_VLDRWU32_rq:
    case MVE_VLDRWU32_rq_u:
    case MVE_VMOVimmf32:
    case MVE_VMOVimmi16:
    case MVE_VMOVimmi32:
    case MVE_VMOVimmi64:
    case MVE_VMOVimmi8:
    case MVE_VMOVNi16bh:
    case MVE_VMOVNi16th:
    case MVE_VMOVNi32bh:
    case MVE_VMOVNi32th:
    case MVE_VMULLBp16:
    case MVE_VMULLBp8:
    case MVE_VMULLBs16:
    case MVE_VMULLBs32:
    case MVE_VMULLBs8:
    case MVE_VMULLBu16:
    case MVE_VMULLBu32:
    case MVE_VMULLBu8:
    case MVE_VMULLTp16:
    case MVE_VMULLTp8:
    case MVE_VMULLTs16:
    case MVE_VMULLTs32:
    case MVE_VMULLTs8:
    case MVE_VMULLTu16:
    case MVE_VMULLTu32:
    case MVE_VMULLTu8:
    case MVE_VMUL_qr_f16:
    case MVE_VMUL_qr_f32:
    case MVE_VMUL_qr_i16:
    case MVE_VMUL_qr_i32:
    case MVE_VMUL_qr_i8:
    case MVE_VMULf16:
    case MVE_VMULf32:
    case MVE_VMULi16:
    case MVE_VMULi8:
    case MVE_VMULi32:
    case MVE_VMVN:
    case MVE_VMVNimmi16:
    case MVE_VMVNimmi32:
    case MVE_VNEGf16:
    case MVE_VNEGf32:
    case MVE_VNEGs16:
    case MVE_VNEGs32:
    case MVE_VNEGs8:
    case MVE_VORN:
    case MVE_VORR:
    case MVE_VORRimmi16:
    case MVE_VORRimmi32:
    case MVE_VPST:
    case MVE_VQABSs16:
    case MVE_VQABSs32:
    case MVE_VQABSs8:
    case MVE_VQADD_qr_s16:
    case MVE_VQADD_qr_s32:
    case MVE_VQADD_qr_s8:
    case MVE_VQADD_qr_u16:
    case MVE_VQADD_qr_u32:
    case MVE_VQADD_qr_u8:
    case MVE_VQADDs16:
    case MVE_VQADDs32:
    case MVE_VQADDs8:
    case MVE_VQADDu16:
    case MVE_VQADDu32:
    case MVE_VQADDu8:
    case MVE_VQDMULH_qr_s16:
    case MVE_VQDMULH_qr_s32:
    case MVE_VQDMULH_qr_s8:
    case MVE_VQDMULHi16:
    case MVE_VQDMULHi32:
    case MVE_VQDMULHi8:
    case MVE_VQDMULL_qr_s16bh:
    case MVE_VQDMULL_qr_s16th:
    case MVE_VQDMULL_qr_s32bh:
    case MVE_VQDMULL_qr_s32th:
    case MVE_VQDMULLs16bh:
    case MVE_VQDMULLs16th:
    case MVE_VQDMULLs32bh:
    case MVE_VQDMULLs32th:
    case MVE_VQRDMULH_qr_s16:
    case MVE_VQRDMULH_qr_s32:
    case MVE_VQRDMULH_qr_s8:
    case MVE_VQRDMULHi16:
    case MVE_VQRDMULHi32:
    case MVE_VQRDMULHi8:
    case MVE_VQNEGs16:
    case MVE_VQNEGs32:
    case MVE_VQNEGs8:
    case MVE_VQMOVNs16bh:
    case MVE_VQMOVNs16th:
    case MVE_VQMOVNs32bh:
    case MVE_VQMOVNs32th:
    case MVE_VQMOVNu16bh:
    case MVE_VQMOVNu16th:
    case MVE_VQMOVNu32bh:
    case MVE_VQMOVNu32th:
    case MVE_VQMOVUNs16bh:
    case MVE_VQMOVUNs16th:
    case MVE_VQMOVUNs32bh:
    case MVE_VQMOVUNs32th:
    case MVE_VQRSHL_by_vecs16:
    case MVE_VQRSHL_by_vecs32:
    case MVE_VQRSHL_by_vecs8:
    case MVE_VQRSHL_by_vecu16:
    case MVE_VQRSHL_by_vecu32:
    case MVE_VQRSHL_by_vecu8:
    case MVE_VQRSHL_qrs16:
    case MVE_VQRSHL_qrs32:
    case MVE_VQRSHL_qrs8:
    case MVE_VQRSHL_qru16:
    case MVE_VQRSHL_qru8:
    case MVE_VQRSHL_qru32:
    case MVE_VQSHLU_imms16:
    case MVE_VQSHLU_imms32:
    case MVE_VQSHLU_imms8:
    case MVE_VQSHLimms16:
    case MVE_VQSHLimms32:
    case MVE_VQSHLimms8:
    case MVE_VQSHLimmu16:
    case MVE_VQSHLimmu32:
    case MVE_VQSHLimmu8:
    case MVE_VQSHL_by_vecs16:
    case MVE_VQSHL_by_vecs32:
    case MVE_VQSHL_by_vecs8:
    case MVE_VQSHL_by_vecu16:
    case MVE_VQSHL_by_vecu32:
    case MVE_VQSHL_by_vecu8:
    case MVE_VQSHL_qrs16:
    case MVE_VQSHL_qrs32:
    case MVE_VQSHL_qrs8:
    case MVE_VQSHL_qru16:
    case MVE_VQSHL_qru32:
    case MVE_VQSHL_qru8:
    case MVE_VQRSHRNbhs16:
    case MVE_VQRSHRNbhs32:
    case MVE_VQRSHRNbhu16:
    case MVE_VQRSHRNbhu32:
    case MVE_VQRSHRNths16:
    case MVE_VQRSHRNths32:
    case MVE_VQRSHRNthu16:
    case MVE_VQRSHRNthu32:
    case MVE_VQRSHRUNs16bh:
    case MVE_VQRSHRUNs16th:
    case MVE_VQRSHRUNs32bh:
    case MVE_VQRSHRUNs32th:
    case MVE_VQSHRNbhs16:
    case MVE_VQSHRNbhs32:
    case MVE_VQSHRNbhu16:
    case MVE_VQSHRNbhu32:
    case MVE_VQSHRNths16:
    case MVE_VQSHRNths32:
    case MVE_VQSHRNthu16:
    case MVE_VQSHRNthu32:
    case MVE_VQSHRUNs16bh:
    case MVE_VQSHRUNs16th:
    case MVE_VQSHRUNs32bh:
    case MVE_VQSHRUNs32th:
    case MVE_VQSUB_qr_s16:
    case MVE_VQSUB_qr_s32:
    case MVE_VQSUB_qr_s8:
    case MVE_VQSUB_qr_u16:
    case MVE_VQSUB_qr_u32:
    case MVE_VQSUB_qr_u8:
    case MVE_VQSUBs16:
    case MVE_VQSUBs32:
    case MVE_VQSUBs8:
    case MVE_VQSUBu16:
    case MVE_VQSUBu32:
    case MVE_VQSUBu8:
    case MVE_VRHADDs16:
    case MVE_VRHADDs32:
    case MVE_VRHADDs8:
    case MVE_VRHADDu16:
    case MVE_VRHADDu32:
    case MVE_VRHADDu8:
    case MVE_VRINTf16A:
    case MVE_VRINTf16M:
    case MVE_VRINTf16N:
    case MVE_VRINTf16P:
    case MVE_VRINTf16X:
    case MVE_VRINTf16Z:
    case MVE_VRINTf32A:
    case MVE_VRINTf32M:
    case MVE_VRINTf32N:
    case MVE_VRINTf32P:
    case MVE_VRINTf32X:
    case MVE_VRINTf32Z:
    case MVE_VRSHL_by_vecs16:
    case MVE_VRSHL_by_vecs32:
    case MVE_VRSHL_by_vecs8:
    case MVE_VRSHL_by_vecu16:
    case MVE_VRSHL_by_vecu32:
    case MVE_VRSHL_by_vecu8:
    case MVE_VRSHL_qrs16:
    case MVE_VRSHL_qrs32:
    case MVE_VRSHL_qrs8:
    case MVE_VRSHL_qru16:
    case MVE_VRSHL_qru32:
    case MVE_VRSHL_qru8:
    case MVE_VRSHR_imms16:
    case MVE_VRSHR_imms32:
    case MVE_VRSHR_imms8:
    case MVE_VRSHR_immu16:
    case MVE_VRSHR_immu32:
    case MVE_VRSHR_immu8:
    case MVE_VRSHRNi16bh:
    case MVE_VRSHRNi16th:
    case MVE_VRSHRNi32bh:
    case MVE_VRSHRNi32th:
    case MVE_VSHL_by_vecs16:
    case MVE_VSHL_by_vecs32:
    case MVE_VSHL_by_vecs8:
    case MVE_VSHL_by_vecu16:
    case MVE_VSHL_by_vecu32:
    case MVE_VSHL_by_vecu8:
    case MVE_VSHL_immi16:
    case MVE_VSHL_immi32:
    case MVE_VSHL_immi8:
    case MVE_VSHL_qrs16:
    case MVE_VSHL_qrs32:
    case MVE_VSHL_qrs8:
    case MVE_VSHL_qru16:
    case MVE_VSHL_qru32:
    case MVE_VSHL_qru8:
    case MVE_VSHR_imms16:
    case MVE_VSHR_imms32:
    case MVE_VSHR_imms8:
    case MVE_VSHR_immu16:
    case MVE_VSHR_immu32:
    case MVE_VSHR_immu8:
    case MVE_VSHRNi16bh:
    case MVE_VSHRNi16th:
    case MVE_VSHRNi32bh:
    case MVE_VSHRNi32th:
    case MVE_VSLIimm16:
    case MVE_VSLIimm32:
    case MVE_VSLIimm8:
    case MVE_VSRIimm16:
    case MVE_VSRIimm32:
    case MVE_VSRIimm8:
    case MVE_VSTRB16:
    case MVE_VSTRB16_post:
    case MVE_VSTRB16_pre:
    case MVE_VSTRB16_rq:
    case MVE_VSTRB32:
    case MVE_VSTRB32_post:
    case MVE_VSTRB32_pre:
    case MVE_VSTRB32_rq:
    case MVE_VSTRB8_rq:
    case MVE_VSTRBU8:
    case MVE_VSTRBU8_post:
    case MVE_VSTRBU8_pre:
    case MVE_VSTRD64_qi:
    case MVE_VSTRD64_qi_pre:
    case MVE_VSTRD64_rq:
    case MVE_VSTRD64_rq_u:
    case MVE_VSTRH16_rq:
    case MVE_VSTRH16_rq_u:
    case MVE_VSTRH32:
    case MVE_VSTRH32_post:
    case MVE_VSTRH32_pre:
    case MVE_VSTRH32_rq:
    case MVE_VSTRH32_rq_u:
    case MVE_VSTRHU16:
    case MVE_VSTRHU16_post:
    case MVE_VSTRHU16_pre:
    case MVE_VSTRW32_qi:
    case MVE_VSTRW32_qi_pre:
    case MVE_VSTRW32_rq:
    case MVE_VSTRW32_rq_u:
    case MVE_VSTRWU32:
    case MVE_VSTRWU32_post:
    case MVE_VSTRWU32_pre:
    case MVE_VSUB_qr_f16:
    case MVE_VSUB_qr_f32:
    case MVE_VSUB_qr_i16:
    case MVE_VSUB_qr_i32:
    case MVE_VSUB_qr_i8:
    case MVE_VSUBf16:
    case MVE_VSUBf32:
    case MVE_VSUBi16:
    case MVE_VSUBi32:
    case MVE_VSUBi8:
    case VLDR_P0_off:
    case VLDR_P0_post:
    case VLDR_P0_pre:
    case VLDR_VPR_off:
    case VLDR_VPR_post:
    case VLDR_VPR_pre:
    case VSTR_P0_off:
    case VSTR_P0_post:
    case VSTR_P0_pre:
    case VSTR_VPR_off:
    case VSTR_VPR_post:
    case VSTR_VPR_pre:
    case VMRS_P0:
    case VMRS_VPR:
      return true;
    }
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
    static_cast<LLVMTargetMachine*>(
      T->createTargetMachine(TT, "generic", "", Options, None, None,
                             CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);

  auto MII = TM->getMCInstrInfo();
  for (unsigned i = 0; i < ARM::INSTRUCTION_LIST_END; ++i) {
    uint64_t Flags = MII->get(i).TSFlags;
    if ((Flags & ARMII::DomainMask) != ARMII::DomainMVE)
      continue;
    bool Valid = (Flags & ARMII::ValidForTailPredication) != 0;
    ASSERT_EQ(IsValidTPOpcode(i), Valid)
              << MII->getName(i)
              << ": mismatched expectation for tail-predicated safety\n";
  }
}

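// Check that no instruction in the MVE, VFP or NEON domains is accidentally
// marked as having unmodeled side effects; only the opcodes listed below are
// expected to carry that property.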
TEST(MachineInstr, HasSideEffects) {
  using namespace ARM;
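  // Opcodes that intentionally report unmodeled side effects, such as the
  // VPT/VCTP predication setup instructions and loads, stores and moves of
  // the FP context and status registers.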
  std::set<unsigned> UnpredictableOpcodes = {
      // MVE Instructions
      MVE_VCTP8,
      MVE_VCTP16,
      MVE_VCTP32,
      MVE_VCTP64,
      MVE_VPST,
      MVE_VPTv16i8,
      MVE_VPTv8i16,
      MVE_VPTv4i32,
      MVE_VPTv16i8r,
      MVE_VPTv8i16r,
      MVE_VPTv4i32r,
      MVE_VPTv16s8,
      MVE_VPTv8s16,
      MVE_VPTv4s32,
      MVE_VPTv16s8r,
      MVE_VPTv8s16r,
      MVE_VPTv4s32r,
      MVE_VPTv16u8,
      MVE_VPTv8u16,
      MVE_VPTv4u32,
      MVE_VPTv16u8r,
      MVE_VPTv8u16r,
      MVE_VPTv4u32r,
      MVE_VPTv8f16,
      MVE_VPTv4f32,
      MVE_VPTv8f16r,
      MVE_VPTv4f32r,
      MVE_VADC,
      MVE_VADCI,
      MVE_VSBC,
      MVE_VSBCI,
      MVE_VSHLC,
      // FP Instructions
      FLDMXIA,
      FLDMXDB_UPD,
      FLDMXIA_UPD,
      FSTMXDB_UPD,
      FSTMXIA,
      FSTMXIA_UPD,
      VLDR_FPCXTNS_off,
      VLDR_FPCXTNS_post,
      VLDR_FPCXTNS_pre,
      VLDR_FPCXTS_off,
      VLDR_FPCXTS_post,
      VLDR_FPCXTS_pre,
      VLDR_FPSCR_NZCVQC_off,
      VLDR_FPSCR_NZCVQC_post,
      VLDR_FPSCR_NZCVQC_pre,
      VLDR_FPSCR_off,
      VLDR_FPSCR_post,
      VLDR_FPSCR_pre,
      VLDR_P0_off,
      VLDR_P0_post,
      VLDR_P0_pre,
      VLDR_VPR_off,
      VLDR_VPR_post,
      VLDR_VPR_pre,
      VLLDM,
      VLSTM,
      VMRS,
      VMRS_FPCXTNS,
      VMRS_FPCXTS,
      VMRS_FPEXC,
      VMRS_FPINST,
      VMRS_FPINST2,
      VMRS_FPSCR_NZCVQC,
      VMRS_FPSID,
      VMRS_MVFR0,
      VMRS_MVFR1,
      VMRS_MVFR2,
      VMRS_P0,
      VMRS_VPR,
      VMSR,
      VMSR_FPCXTNS,
      VMSR_FPCXTS,
      VMSR_FPEXC,
      VMSR_FPINST,
      VMSR_FPINST2,
      VMSR_FPSCR_NZCVQC,
      VMSR_FPSID,
      VMSR_P0,
      VMSR_VPR,
      VSCCLRMD,
      VSCCLRMS,
      VSTR_FPCXTNS_off,
      VSTR_FPCXTNS_post,
      VSTR_FPCXTNS_pre,
      VSTR_FPCXTS_off,
      VSTR_FPCXTS_post,
      VSTR_FPCXTS_pre,
      VSTR_FPSCR_NZCVQC_off,
      VSTR_FPSCR_NZCVQC_post,
      VSTR_FPSCR_NZCVQC_pre,
      VSTR_FPSCR_off,
      VSTR_FPSCR_post,
      VSTR_FPSCR_pre,
      VSTR_P0_off,
      VSTR_P0_post,
      VSTR_P0_pre,
      VSTR_VPR_off,
      VSTR_VPR_post,
      VSTR_VPR_pre,
  };

  LLVMInitializeARMTargetInfo();
  LLVMInitializeARMTarget();
  LLVMInitializeARMTargetMC();

  auto TT(Triple::normalize("thumbv8.1m.main-none-none-eabi"));
  std::string Error;
  const Target *T = TargetRegistry::lookupTarget(TT, Error);
  if (!T) {
    dbgs() << Error;
    return;
  }

  TargetOptions Options;
  auto TM = std::unique_ptr<LLVMTargetMachine>(
      static_cast<LLVMTargetMachine *>(T->createTargetMachine(
          TT, "generic", "", Options, None, None, CodeGenOpt::Default)));
  ARMSubtarget ST(TM->getTargetTriple(), std::string(TM->getTargetCPU()),
                  std::string(TM->getTargetFeatureString()),
                  *static_cast<const ARMBaseTargetMachine *>(TM.get()), false);
  const ARMBaseInstrInfo *TII = ST.getInstrInfo();
  auto MII = TM->getMCInstrInfo();

  for (unsigned Op = 0; Op < ARM::INSTRUCTION_LIST_END; ++Op) {
    const MCInstrDesc &Desc = TII->get(Op);
    if ((Desc.TSFlags &
         (ARMII::DomainMVE | ARMII::DomainVFP | ARMII::DomainNEONA8)) == 0)
      continue;
    if (UnpredictableOpcodes.count(Op))
      continue;

    ASSERT_FALSE(Desc.hasUnmodeledSideEffects())
        << MII->getName(Op) << " has unexpected side effects";
  }
}