//===-- MachineVerifier.cpp - Machine Code Verifier ----------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Pass to verify generated machine code. The following is checked:
//
// Operand counts: All explicit operands must be present.
//
// Register classes: All physical and virtual register operands must be
// compatible with the register class required by the instruction descriptor.
//
// Register live intervals: Registers must be defined only once, and must be
// defined before use.
//
// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
// command-line option -verify-machineinstrs, or by defining the environment
// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
// the verifier errors.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SetOperations.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

namespace {
  struct MachineVerifier {

    MachineVerifier(Pass *pass, const char *b) :
      PASS(pass),
      Banner(b)
      {}

    unsigned verify(MachineFunction &MF);

    Pass *const PASS;
    const char *Banner;
    const MachineFunction *MF;
    const TargetMachine *TM;
    const TargetInstrInfo *TII;
    const TargetRegisterInfo *TRI;
    const MachineRegisterInfo *MRI;

    unsigned foundErrors;

    // Avoid querying the MachineFunctionProperties for each operand.
    bool isFunctionRegBankSelected;
    bool isFunctionSelected;

    typedef SmallVector<unsigned, 16> RegVector;
    typedef SmallVector<const uint32_t*, 4> RegMaskVector;
    typedef DenseSet<unsigned> RegSet;
    typedef DenseMap<unsigned, const MachineInstr*> RegMap;
    typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;

    const MachineInstr *FirstTerminator;
    BlockSet FunctionBlocks;

    BitVector regsReserved;
    RegSet regsLive;
    RegVector regsDefined, regsDead, regsKilled;
    RegMaskVector regMasks;
    RegSet regsLiveInButUnused;

    SlotIndex lastIndex;

    // Add Reg and any sub-registers to RV
    void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
      RV.push_back(Reg);
      if (TargetRegisterInfo::isPhysicalRegister(Reg))
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          RV.push_back(*SubRegs);
    }

    struct BBInfo {
      // Is this MBB reachable from the MF entry point?
      bool reachable;

      // Vregs that must be live in because they are used without being
      // defined. Map value is the user.
      RegMap vregsLiveIn;

      // Regs killed in MBB. They may be defined again, and will then be in
      // both regsKilled and regsLiveOut.
      RegSet regsKilled;

      // Regs defined in MBB and live out. Note that vregs passing through may
      // be live out without being mentioned here.
      RegSet regsLiveOut;

      // Vregs that pass through MBB untouched. This set is disjoint from
      // regsKilled and regsLiveOut.
      RegSet vregsPassed;

      // Vregs that must pass through MBB because they are needed by a
      // successor block. This set is disjoint from regsLiveOut.
      RegSet vregsRequired;

      // Set versions of block's predecessor and successor lists.
      BlockSet Preds, Succs;

      BBInfo() : reachable(false) {}

      // Add register to vregsPassed if it belongs there. Return true if
      // anything changed.
      bool addPassed(unsigned Reg) {
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          return false;
        if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
          return false;
        return vregsPassed.insert(Reg).second;
      }

      // Same for a full set.
      bool addPassed(const RegSet &RS) {
        bool changed = false;
        for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
          if (addPassed(*I))
            changed = true;
        return changed;
      }

      // Add register to vregsRequired if it belongs there. Return true if
      // anything changed.
      bool addRequired(unsigned Reg) {
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          return false;
        if (regsLiveOut.count(Reg))
          return false;
        return vregsRequired.insert(Reg).second;
      }

      // Same for a full set.
      bool addRequired(const RegSet &RS) {
        bool changed = false;
        for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
          if (addRequired(*I))
            changed = true;
        return changed;
      }

      // Same for a full map.
      bool addRequired(const RegMap &RM) {
        bool changed = false;
        for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
          if (addRequired(I->first))
            changed = true;
        return changed;
      }

      // Live-out registers are either in regsLiveOut or vregsPassed.
      bool isLiveOut(unsigned Reg) const {
        return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
      }
    };

    // Extra register info per MBB.
    DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;

    bool isReserved(unsigned Reg) {
      return Reg < regsReserved.size() && regsReserved.test(Reg);
    }

    bool isAllocatable(unsigned Reg) {
      return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
    }

    // Analysis information if available
    LiveVariables *LiveVars;
    LiveIntervals *LiveInts;
    LiveStacks *LiveStks;
    SlotIndexes *Indexes;

    void visitMachineFunctionBefore();
    void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
    void visitMachineBundleBefore(const MachineInstr *MI);
    void visitMachineInstrBefore(const MachineInstr *MI);
    void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
    void visitMachineInstrAfter(const MachineInstr *MI);
    void visitMachineBundleAfter(const MachineInstr *MI);
    void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
    void visitMachineFunctionAfter();

    void report(const char *msg, const MachineFunction *MF);
    void report(const char *msg, const MachineBasicBlock *MBB);
    void report(const char *msg, const MachineInstr *MI);
    void report(const char *msg, const MachineOperand *MO, unsigned MONum);

    void report_context(const LiveInterval &LI) const;
    void report_context(const LiveRange &LR, unsigned VRegUnit,
                        LaneBitmask LaneMask) const;
    void report_context(const LiveRange::Segment &S) const;
    void report_context(const VNInfo &VNI) const;
    void report_context(SlotIndex Pos) const;
    void report_context_liverange(const LiveRange &LR) const;
    void report_context_lanemask(LaneBitmask LaneMask) const;
    void report_context_vreg(unsigned VReg) const;
    void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;

    void verifyInlineAsm(const MachineInstr *MI);

    void checkLiveness(const MachineOperand *MO, unsigned MONum);
    void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
                            SlotIndex UseIdx, const LiveRange &LR,
                            unsigned Reg, LaneBitmask LaneMask = 0);
    void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
                            SlotIndex DefIdx, const LiveRange &LR,
                            unsigned Reg, LaneBitmask LaneMask = 0);

    void markReachable(const MachineBasicBlock *MBB);
    void calcRegsPassed();
    void checkPHIOps(const MachineBasicBlock *MBB);

    void calcRegsRequired();
    void verifyLiveVariables();
    void verifyLiveIntervals();
    void verifyLiveInterval(const LiveInterval&);
    void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
                              unsigned);
    void verifyLiveRangeSegment(const LiveRange&,
                                const LiveRange::const_iterator I, unsigned,
                                unsigned);
    void verifyLiveRange(const LiveRange&, unsigned, LaneBitmask LaneMask = 0);

    void verifyStackFrame();

    void verifySlotIndexes() const;
    void verifyProperties(const MachineFunction &MF);
  };

  struct MachineVerifierPass : public MachineFunctionPass {
    static char ID; // Pass ID, replacement for typeid
    const std::string Banner;

    MachineVerifierPass(const std::string &banner = std::string())
      : MachineFunctionPass(ID), Banner(banner) {
        initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
      }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesAll();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    bool runOnMachineFunction(MachineFunction &MF) override {
      unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
      if (FoundErrors)
        report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
      return false;
    }
  };

}

char MachineVerifierPass::ID = 0;
INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
                "Verify generated machine code", false, false)

FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
  return new MachineVerifierPass(Banner);
}

bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
    const {
  MachineFunction &MF = const_cast<MachineFunction&>(*this);
  unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
  if (AbortOnErrors && FoundErrors)
    report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
  return FoundErrors == 0;
}

void MachineVerifier::verifySlotIndexes() const {
  if (Indexes == nullptr)
    return;

  // Ensure the IdxMBB list is sorted by slot indexes.
  SlotIndex Last;
  for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
       E = Indexes->MBBIndexEnd(); I != E; ++I) {
    assert(!Last.isValid() || I->first > Last);
    Last = I->first;
  }
}

void MachineVerifier::verifyProperties(const MachineFunction &MF) {
  // If a pass has introduced virtual registers without clearing the
  // NoVRegs property (or set it without allocating the vregs)
  // then report an error.
  if (MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::NoVRegs) &&
      MRI->getNumVirtRegs())
    report("Function has NoVRegs property but there are VReg operands", &MF);
}

unsigned MachineVerifier::verify(MachineFunction &MF) {
  foundErrors = 0;

  this->MF = &MF;
  TM = &MF.getTarget();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  isFunctionRegBankSelected = MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::RegBankSelected);
  isFunctionSelected = MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::Selected);

  LiveVars = nullptr;
  LiveInts = nullptr;
  LiveStks = nullptr;
  Indexes = nullptr;
  if (PASS) {
    LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
    // We don't want to verify LiveVariables if LiveIntervals is available.
    if (!LiveInts)
      LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
    LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
    Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
  }

  verifySlotIndexes();

  verifyProperties(MF);

  visitMachineFunctionBefore();
  for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
       MFI != MFE; ++MFI) {
    visitMachineBasicBlockBefore(&*MFI);
    // Keep track of the current bundle header.
    const MachineInstr *CurBundle = nullptr;
    // Do we expect the next instruction to be part of the same bundle?
    bool InBundle = false;

    for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
         MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
      if (MBBI->getParent() != &*MFI) {
        report("Bad instruction parent pointer", &*MFI);
        errs() << "Instruction: " << *MBBI;
        continue;
      }

      // Check for consistent bundle flags.
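      // Illustration (assumed layout, not taken from this file): in a bundle
      //   BUNDLE header         ; isBundledWithSucc()
      //     MI1                 ; isBundledWithPred() && isBundledWithSucc()
      //     MI2                 ; isBundledWithPred()
      // every instruction except the header has BundledPred set and every
      // instruction except the last has BundledSucc set; the checks below
      // verify that adjacent instructions agree on these flags.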
      if (InBundle && !MBBI->isBundledWithPred())
        report("Missing BundledPred flag, "
               "BundledSucc was set on predecessor",
               &*MBBI);
      if (!InBundle && MBBI->isBundledWithPred())
        report("BundledPred flag is set, "
               "but BundledSucc not set on predecessor",
               &*MBBI);

      // Is this a bundle header?
      if (!MBBI->isInsideBundle()) {
        if (CurBundle)
          visitMachineBundleAfter(CurBundle);
        CurBundle = &*MBBI;
        visitMachineBundleBefore(CurBundle);
      } else if (!CurBundle)
        report("No bundle header", &*MBBI);
      visitMachineInstrBefore(&*MBBI);
      for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
        const MachineInstr &MI = *MBBI;
        const MachineOperand &Op = MI.getOperand(I);
        if (Op.getParent() != &MI) {
          // Make sure to use correct addOperand / RemoveOperand / ChangeTo
          // functions when replacing operands of a MachineInstr.
          report("Instruction has operand with wrong parent set", &MI);
        }

        visitMachineOperand(&Op, I);
      }

      visitMachineInstrAfter(&*MBBI);

      // Was this the last bundled instruction?
      InBundle = MBBI->isBundledWithSucc();
    }
    if (CurBundle)
      visitMachineBundleAfter(CurBundle);
    if (InBundle)
      report("BundledSucc flag set on last instruction in block",
             &MFI->back());
    visitMachineBasicBlockAfter(&*MFI);
  }
  visitMachineFunctionAfter();

  // Clean up.
  regsLive.clear();
  regsDefined.clear();
  regsDead.clear();
  regsKilled.clear();
  regMasks.clear();
  regsLiveInButUnused.clear();
  MBBInfoMap.clear();

  return foundErrors;
}

void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
  assert(MF);
  errs() << '\n';
  if (!foundErrors++) {
    if (Banner)
      errs() << "# " << Banner << '\n';
    if (LiveInts != nullptr)
      LiveInts->print(errs());
    else
      MF->print(errs(), Indexes);
  }
  errs() << "*** Bad machine code: " << msg << " ***\n"
         << "- function: " << MF->getName() << "\n";
}

void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
  assert(MBB);
  report(msg, MBB->getParent());
  errs() << "- basic block: BB#" << MBB->getNumber()
         << ' ' << MBB->getName()
         << " (" << (const void*)MBB << ')';
  if (Indexes)
    errs() << " [" << Indexes->getMBBStartIdx(MBB)
           << ';' << Indexes->getMBBEndIdx(MBB) << ')';
  errs() << '\n';
}

void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
  assert(MI);
  report(msg, MI->getParent());
  errs() << "- instruction: ";
  if (Indexes && Indexes->hasIndex(*MI))
    errs() << Indexes->getInstructionIndex(*MI) << '\t';
  MI->print(errs(), /*SkipOpers=*/true);
  errs() << '\n';
}

void MachineVerifier::report(const char *msg,
                             const MachineOperand *MO, unsigned MONum) {
  assert(MO);
  report(msg, MO->getParent());
  errs() << "- operand " << MONum << ": ";
  MO->print(errs(), TRI);
  errs() << "\n";
}

void MachineVerifier::report_context(SlotIndex Pos) const {
  errs() << "- at: " << Pos << '\n';
}

void MachineVerifier::report_context(const LiveInterval &LI) const {
  errs() << "- interval: " << LI << '\n';
}

void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
                                     LaneBitmask LaneMask) const {
  report_context_liverange(LR);
  report_context_vreg_regunit(VRegUnit);
  if (LaneMask != 0)
    report_context_lanemask(LaneMask);
}

void MachineVerifier::report_context(const LiveRange::Segment &S) const {
  errs() << "- segment: " << S << '\n';
}

void MachineVerifier::report_context(const VNInfo &VNI) const {
  errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
}

void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
  errs() << "- liverange: " << LR << '\n';
}

void MachineVerifier::report_context_vreg(unsigned VReg) const {
  errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
}

void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
  if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
    report_context_vreg(VRegOrUnit);
  } else {
    errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
  }
}

void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
  errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
}

void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
  BBInfo &MInfo = MBBInfoMap[MBB];
  if (!MInfo.reachable) {
    MInfo.reachable = true;
    for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
         SuE = MBB->succ_end(); SuI != SuE; ++SuI)
      markReachable(*SuI);
  }
}

void MachineVerifier::visitMachineFunctionBefore() {
  lastIndex = SlotIndex();
  regsReserved = MRI->getReservedRegs();

  // A sub-register of a reserved register is also reserved
  for (int Reg = regsReserved.find_first(); Reg>=0;
       Reg = regsReserved.find_next(Reg)) {
    for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
      // FIXME: This should probably be:
      // assert(regsReserved.test(*SubRegs) && "Non-reserved sub-register");
      regsReserved.set(*SubRegs);
    }
  }

  markReachable(&MF->front());

  // Build a set of the basic blocks in the function.
  FunctionBlocks.clear();
  for (const auto &MBB : *MF) {
    FunctionBlocks.insert(&MBB);
    BBInfo &MInfo = MBBInfoMap[&MBB];

    MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
    if (MInfo.Preds.size() != MBB.pred_size())
      report("MBB has duplicate entries in its predecessor list.", &MBB);

    MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
    if (MInfo.Succs.size() != MBB.succ_size())
      report("MBB has duplicate entries in its successor list.", &MBB);
  }

  // Check that the register use lists are sane.
  MRI->verifyUseLists();

  verifyStackFrame();
}

// Does iterator point to a and b as the first two elements?
static bool matchPair(MachineBasicBlock::const_succ_iterator i,
                      const MachineBasicBlock *a, const MachineBasicBlock *b) {
  if (*i == a)
    return *++i == b;
  if (*i == b)
    return *++i == a;
  return false;
}

void
MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
  FirstTerminator = nullptr;

  if (!MF->getProperties().hasProperty(
          MachineFunctionProperties::Property::NoPHIs)) {
    // If this block has allocatable physical registers live-in, check that
    // it is an entry block or landing pad.
    for (const auto &LI : MBB->liveins()) {
      if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
          MBB->getIterator() != MBB->getParent()->begin()) {
        report("MBB has allocatable live-in, but isn't entry or landing-pad.",
               MBB);
      }
    }
  }

  // Count the number of landing pad successors.
  SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
  for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
       E = MBB->succ_end(); I != E; ++I) {
    if ((*I)->isEHPad())
      LandingPadSuccs.insert(*I);
    if (!FunctionBlocks.count(*I))
      report("MBB has successor that isn't part of the function.", MBB);
    if (!MBBInfoMap[*I].Preds.count(MBB)) {
      report("Inconsistent CFG", MBB);
      errs() << "MBB is not in the predecessor list of the successor BB#"
             << (*I)->getNumber() << ".\n";
    }
  }

  // Check the predecessor list.
  for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
       E = MBB->pred_end(); I != E; ++I) {
    if (!FunctionBlocks.count(*I))
      report("MBB has predecessor that isn't part of the function.", MBB);
    if (!MBBInfoMap[*I].Succs.count(MBB)) {
      report("Inconsistent CFG", MBB);
      errs() << "MBB is not in the successor list of the predecessor BB#"
             << (*I)->getNumber() << ".\n";
    }
  }

  const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
  const BasicBlock *BB = MBB->getBasicBlock();
  const Function *Fn = MF->getFunction();
  if (LandingPadSuccs.size() > 1 &&
      !(AsmInfo &&
        AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
        BB && isa<SwitchInst>(BB->getTerminator())) &&
      !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
    report("MBB has more than one landing pad successor", MBB);

  // Call AnalyzeBranch. If it succeeds, there are several more conditions to
  // check.
  MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
  SmallVector<MachineOperand, 4> Cond;
  if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
                          Cond)) {
    // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
    // check whether its answers match up with reality.
    if (!TBB && !FBB) {
      // Block falls through to its successor.
      MachineFunction::const_iterator MBBI = MBB->getIterator();
      ++MBBI;
      if (MBBI == MF->end()) {
        // It's possible that the block legitimately ends with a noreturn
        // call or an unreachable, in which case it won't actually fall
        // out the bottom of the function.
      } else if (MBB->succ_size() == LandingPadSuccs.size()) {
        // It's possible that the block legitimately ends with a noreturn
        // call or an unreachable, in which case it won't actually fall
        // out of the block.
      } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
        report("MBB exits via unconditional fall-through but doesn't have "
               "exactly one CFG successor!", MBB);
      } else if (!MBB->isSuccessor(&*MBBI)) {
        report("MBB exits via unconditional fall-through but its successor "
               "differs from its CFG successor!", MBB);
      }
      if (!MBB->empty() && MBB->back().isBarrier() &&
          !TII->isPredicated(MBB->back())) {
        report("MBB exits via unconditional fall-through but ends with a "
               "barrier instruction!", MBB);
      }
      if (!Cond.empty()) {
        report("MBB exits via unconditional fall-through but has a condition!",
               MBB);
      }
    } else if (TBB && !FBB && Cond.empty()) {
      // Block unconditionally branches somewhere.
      // If the block has exactly one successor that happens to be a
      // landingpad, accept it as valid control flow.
      if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
          (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
           *MBB->succ_begin() != *LandingPadSuccs.begin())) {
        report("MBB exits via unconditional branch but doesn't have "
               "exactly one CFG successor!", MBB);
      } else if (!MBB->isSuccessor(TBB)) {
        report("MBB exits via unconditional branch but the CFG "
               "successor doesn't match the actual successor!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via unconditional branch but doesn't contain "
               "any instructions!", MBB);
      } else if (!MBB->back().isBarrier()) {
        report("MBB exits via unconditional branch but doesn't end with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via unconditional branch but the branch isn't a "
               "terminator instruction!", MBB);
      }
    } else if (TBB && !FBB && !Cond.empty()) {
      // Block conditionally branches somewhere, otherwise falls through.
      MachineFunction::const_iterator MBBI = MBB->getIterator();
      ++MBBI;
      if (MBBI == MF->end()) {
        report("MBB conditionally falls through out of function!", MBB);
      } else if (MBB->succ_size() == 1) {
        // A conditional branch with only one successor is weird, but allowed.
        if (&*MBBI != TBB)
          report("MBB exits via conditional branch/fall-through but only has "
                 "one CFG successor!", MBB);
        else if (TBB != *MBB->succ_begin())
          report("MBB exits via conditional branch/fall-through but the CFG "
                 "successor doesn't match the actual successor!", MBB);
      } else if (MBB->succ_size() != 2) {
        report("MBB exits via conditional branch/fall-through but doesn't have "
               "exactly two CFG successors!", MBB);
      } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
        report("MBB exits via conditional branch/fall-through but the CFG "
               "successors don't match the actual successors!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via conditional branch/fall-through but doesn't "
               "contain any instructions!", MBB);
      } else if (MBB->back().isBarrier()) {
        report("MBB exits via conditional branch/fall-through but ends with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via conditional branch/fall-through but the branch "
               "isn't a terminator instruction!", MBB);
      }
    } else if (TBB && FBB) {
      // Block conditionally branches somewhere, otherwise branches
      // somewhere else.
      if (MBB->succ_size() == 1) {
        // A conditional branch with only one successor is weird, but allowed.
        if (FBB != TBB)
          report("MBB exits via conditional branch/branch through but only has "
                 "one CFG successor!", MBB);
        else if (TBB != *MBB->succ_begin())
          report("MBB exits via conditional branch/branch through but the CFG "
                 "successor doesn't match the actual successor!", MBB);
      } else if (MBB->succ_size() != 2) {
        report("MBB exits via conditional branch/branch but doesn't have "
               "exactly two CFG successors!", MBB);
      } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
        report("MBB exits via conditional branch/branch but the CFG "
               "successors don't match the actual successors!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via conditional branch/branch but doesn't "
               "contain any instructions!", MBB);
      } else if (!MBB->back().isBarrier()) {
        report("MBB exits via conditional branch/branch but doesn't end with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via conditional branch/branch but the branch "
               "isn't a terminator instruction!", MBB);
      }
      if (Cond.empty()) {
        report("MBB exits via conditional branch/branch but there's no "
               "condition!", MBB);
      }
    } else {
      report("AnalyzeBranch returned invalid data!", MBB);
    }
  }

  regsLive.clear();
  for (const auto &LI : MBB->liveins()) {
    if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
      report("MBB live-in list contains non-physical register", MBB);
      continue;
    }
    for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
         SubRegs.isValid(); ++SubRegs)
      regsLive.insert(*SubRegs);
  }
  regsLiveInButUnused = regsLive;

  const MachineFrameInfo &MFI = MF->getFrameInfo();
  BitVector PR = MFI.getPristineRegs(*MF);
  for (int I = PR.find_first(); I>0; I = PR.find_next(I)) {
    for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
         SubRegs.isValid(); ++SubRegs)
      regsLive.insert(*SubRegs);
  }

  regsKilled.clear();
  regsDefined.clear();

  if (Indexes)
    lastIndex = Indexes->getMBBStartIdx(MBB);
}

// This function gets called for all bundle headers, including normal
// stand-alone unbundled instructions.
void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
  if (Indexes && Indexes->hasIndex(*MI)) {
    SlotIndex idx = Indexes->getInstructionIndex(*MI);
    if (!(idx > lastIndex)) {
      report("Instruction index out of order", MI);
      errs() << "Last instruction was at " << lastIndex << '\n';
    }
    lastIndex = idx;
  }

  // Ensure non-terminators don't follow terminators.
  // Ignore predicated terminators formed by if conversion.
  // FIXME: If conversion shouldn't need to violate this rule.
  if (MI->isTerminator() && !TII->isPredicated(*MI)) {
    if (!FirstTerminator)
      FirstTerminator = MI;
  } else if (FirstTerminator) {
    report("Non-terminator instruction after the first terminator", MI);
    errs() << "First terminator was:\t" << *FirstTerminator;
  }
}

// The operands on an INLINEASM instruction must follow a template.
// Verify that the flag operands make sense.
void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
  // The first two operands on INLINEASM are the asm string and global flags.
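  // Rough operand layout, for illustration (see InlineAsm.h for the
  // authoritative encoding): operand 0 is the asm string symbol, operand 1 is
  // the extra-info immediate, then come the operand groups, each introduced by
  // a flag immediate that encodes its kind and the number of register operands
  // that follow, optionally followed by a metadata node and trailing implicit
  // register operands.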
  if (MI->getNumOperands() < 2) {
    report("Too few operands on inline asm", MI);
    return;
  }
  if (!MI->getOperand(0).isSymbol())
    report("Asm string must be an external symbol", MI);
  if (!MI->getOperand(1).isImm())
    report("Asm flags must be an immediate", MI);
  // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
  // Extra_AsmDialect = 4, Extra_MayLoad = 8, Extra_MayStore = 16,
  // and Extra_IsConvergent = 32.
  if (!isUInt<6>(MI->getOperand(1).getImm()))
    report("Unknown asm flags", &MI->getOperand(1), 1);

  static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");

  unsigned OpNo = InlineAsm::MIOp_FirstOperand;
  unsigned NumOps;
  for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
    const MachineOperand &MO = MI->getOperand(OpNo);
    // There may be implicit ops after the fixed operands.
    if (!MO.isImm())
      break;
    NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
  }

  if (OpNo > MI->getNumOperands())
    report("Missing operands in last group", MI);

  // An optional MDNode follows the groups.
  if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
    ++OpNo;

  // All trailing operands must be implicit registers.
  for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
    const MachineOperand &MO = MI->getOperand(OpNo);
    if (!MO.isReg() || !MO.isImplicit())
      report("Expected implicit register after groups", &MO, OpNo);
  }
}

void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (MI->getNumOperands() < MCID.getNumOperands()) {
    report("Too few operands", MI);
    errs() << MCID.getNumOperands() << " operands expected, but "
           << MI->getNumOperands() << " given.\n";
  }

  if (MI->isPHI() && MF->getProperties().hasProperty(
          MachineFunctionProperties::Property::NoPHIs))
    report("Found PHI instruction with NoPHIs property set", MI);

  // Check the tied operands.
  if (MI->isInlineAsm())
    verifyInlineAsm(MI);

  // Check the MachineMemOperands for basic consistency.
  for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
       E = MI->memoperands_end(); I != E; ++I) {
    if ((*I)->isLoad() && !MI->mayLoad())
      report("Missing mayLoad flag", MI);
    if ((*I)->isStore() && !MI->mayStore())
      report("Missing mayStore flag", MI);
  }

  // Debug values must not have a slot index.
  // Other instructions must have one, unless they are inside a bundle.
  if (LiveInts) {
    bool mapped = !LiveInts->isNotInMIMap(*MI);
    if (MI->isDebugValue()) {
      if (mapped)
        report("Debug instruction has a slot index", MI);
    } else if (MI->isInsideBundle()) {
      if (mapped)
        report("Instruction inside bundle has a slot index", MI);
    } else {
      if (!mapped)
        report("Missing slot index", MI);
    }
  }

  // Check types.
  const unsigned NumTypes = MI->getNumTypes();
  if (isPreISelGenericOpcode(MCID.getOpcode())) {
    if (isFunctionSelected)
      report("Unexpected generic instruction in a Selected function", MI);

    if (NumTypes == 0)
      report("Generic instruction must have a type", MI);
  } else {
    if (NumTypes != 0)
      report("Non-generic instruction cannot have a type", MI);
  }

  // Generic opcodes must not have physical register operands.
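  // For illustration (assumed background on the GlobalISel flow, not stated in
  // this file): pre-isel generic instructions such as G_ADD are expected to
  // operate on virtual registers only; physical registers are introduced by
  // instruction selection and later passes, so a physreg operand here is
  // suspect.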
  if (isPreISelGenericOpcode(MCID.getOpcode()) &&
      MCID.getOpcode() != TargetOpcode::G_TYPE) {
    for (auto &Op : MI->operands()) {
      if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
        report("Generic instruction cannot have physical register", MI);
    }
  }

  StringRef ErrorInfo;
  if (!TII->verifyInstruction(*MI, ErrorInfo))
    report(ErrorInfo.data(), MI);
}

void
MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const MCInstrDesc &MCID = MI->getDesc();
  unsigned NumDefs = MCID.getNumDefs();
  if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
    NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;

  // The first MCID.NumDefs operands must be explicit register defines
  if (MONum < NumDefs) {
    const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
    if (!MO->isReg())
      report("Explicit definition must be a register", MO, MONum);
    else if (!MO->isDef() && !MCOI.isOptionalDef())
      report("Explicit definition marked as use", MO, MONum);
    else if (MO->isImplicit())
      report("Explicit definition marked as implicit", MO, MONum);
  } else if (MONum < MCID.getNumOperands()) {
    const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
    // Don't check if it's the last operand in a variadic instruction. See,
    // e.g., LDM_RET in the arm back end.
    if (MO->isReg() &&
        !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
      if (MO->isDef() && !MCOI.isOptionalDef())
        report("Explicit operand marked as def", MO, MONum);
      if (MO->isImplicit())
        report("Explicit operand marked as implicit", MO, MONum);
    }

    int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
    if (TiedTo != -1) {
      if (!MO->isReg())
        report("Tied use must be a register", MO, MONum);
      else if (!MO->isTied())
        report("Operand should be tied", MO, MONum);
      else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
        report("Tied def doesn't match MCInstrDesc", MO, MONum);
    } else if (MO->isReg() && MO->isTied())
      report("Explicit operand should not be tied", MO, MONum);
  } else {
    // ARM adds %reg0 operands to indicate predicates. We'll allow that.
    if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
      report("Extra explicit operand on non-variadic instruction", MO, MONum);
  }

  switch (MO->getType()) {
  case MachineOperand::MO_Register: {
    const unsigned Reg = MO->getReg();
    if (!Reg)
      return;
    if (MRI->tracksLiveness() && !MI->isDebugValue())
      checkLiveness(MO, MONum);

    // Verify the consistency of tied operands.
    if (MO->isTied()) {
      unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
      const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
      if (!OtherMO.isReg())
        report("Must be tied to a register", MO, MONum);
      if (!OtherMO.isTied())
        report("Missing tie flags on tied operand", MO, MONum);
      if (MI->findTiedOperandIdx(OtherIdx) != MONum)
        report("Inconsistent tie links", MO, MONum);
      if (MONum < MCID.getNumDefs()) {
        if (OtherIdx < MCID.getNumOperands()) {
          if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
            report("Explicit def tied to explicit use without tie constraint",
                   MO, MONum);
        } else {
          if (!OtherMO.isImplicit())
            report("Explicit def should be tied to implicit use", MO, MONum);
        }
      }
    }

    // Verify two-address constraints after leaving SSA form.
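    // For illustration (hypothetical operands, not from this file): after the
    // two-address pass, a tied pair such as
    //   %vreg1<def,tied> = ADD %vreg1<tied>, %vreg2
    // must use the same register for the tied def and the tied use; the check
    // below reports the mismatch in something like "%vreg1 = ADD %vreg3, ...".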
    unsigned DefIdx;
    if (!MRI->isSSA() && MO->isUse() &&
        MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
        Reg != MI->getOperand(DefIdx).getReg())
      report("Two-address instruction operands must be identical", MO, MONum);

    // Check register classes.
    if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
      unsigned SubIdx = MO->getSubReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (SubIdx) {
          report("Illegal subregister index for physical register", MO, MONum);
          return;
        }
        if (const TargetRegisterClass *DRC =
              TII->getRegClass(MCID, MONum, TRI, *MF)) {
          if (!DRC->contains(Reg)) {
            report("Illegal physical register for instruction", MO, MONum);
            errs() << TRI->getName(Reg) << " is not a "
                   << TRI->getRegClassName(DRC) << " register.\n";
          }
        }
      } else {
        // Virtual register.
        const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
        if (!RC) {
          // This is a generic virtual register.

          // If we're post-Select, we can't have gvregs anymore.
          if (isFunctionSelected) {
            report("Generic virtual register invalid in a Selected function",
                   MO, MONum);
            return;
          }

          // The gvreg must have a size and it must not have a SubIdx.
          unsigned Size = MRI->getSize(Reg);
          if (!Size) {
            report("Generic virtual register must have a size", MO, MONum);
            return;
          }

          const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);

          // If we're post-RegBankSelect, the gvreg must have a bank.
          if (!RegBank && isFunctionRegBankSelected) {
            report("Generic virtual register must have a bank in a "
                   "RegBankSelected function",
                   MO, MONum);
            return;
          }

          // Make sure the register fits into its register bank if any.
          if (RegBank && RegBank->getSize() < Size) {
            report("Register bank is too small for virtual register", MO,
                   MONum);
            errs() << "Register bank " << RegBank->getName() << " too small("
                   << RegBank->getSize() << ") to fit " << Size << "-bits\n";
            return;
          }
          if (SubIdx) {
            report("Generic virtual register does not allow subregister index",
                   MO, MONum);
            return;
          }
          break;
        }
        if (SubIdx) {
          const TargetRegisterClass *SRC =
            TRI->getSubClassWithSubReg(RC, SubIdx);
          if (!SRC) {
            report("Invalid subregister index for virtual register", MO,
                   MONum);
            errs() << "Register class " << TRI->getRegClassName(RC)
                   << " does not support subreg index " << SubIdx << "\n";
            return;
          }
          if (RC != SRC) {
            report("Invalid register class for subregister index", MO, MONum);
            errs() << "Register class " << TRI->getRegClassName(RC)
                   << " does not fully support subreg index " << SubIdx
                   << "\n";
            return;
          }
        }
        if (const TargetRegisterClass *DRC =
              TII->getRegClass(MCID, MONum, TRI, *MF)) {
          if (SubIdx) {
            const TargetRegisterClass *SuperRC =
              TRI->getLargestLegalSuperClass(RC, *MF);
            if (!SuperRC) {
              report("No largest legal super class exists.", MO, MONum);
              return;
            }
            DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
            if (!DRC) {
              report("No matching super-reg register class.", MO, MONum);
              return;
            }
          }
          if (!RC->hasSuperClassEq(DRC)) {
            report("Illegal virtual register for instruction", MO, MONum);
            errs() << "Expected a " << TRI->getRegClassName(DRC)
                   << " register, but got a " << TRI->getRegClassName(RC)
                   << " register\n";
          }
        }
      }
    }
    break;
  }

  case MachineOperand::MO_RegisterMask:
    regMasks.push_back(MO->getRegMask());
    break;

  case MachineOperand::MO_MachineBasicBlock:
    if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
      report("PHI operand is not in the CFG", MO, MONum);
    break;

  case MachineOperand::MO_FrameIndex:
    if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
        LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      int FI = MO->getIndex();
      LiveInterval &LI = LiveStks->getInterval(FI);
      SlotIndex Idx = LiveInts->getInstructionIndex(*MI);

      bool stores = MI->mayStore();
      bool loads = MI->mayLoad();
      // For a memory-to-memory move, we need to check if the frame
      // index is used for storing or loading, by inspecting the
      // memory operands.
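      // Illustration (assumed scenario, not from this file): a stack-to-stack
      // copy pseudo both loads and stores; its fixed-stack memoperands tell us
      // whether this particular frame index is the source or the destination,
      // so only the matching liveness check below should apply.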
      if (stores && loads) {
        for (auto *MMO : MI->memoperands()) {
          const PseudoSourceValue *PSV = MMO->getPseudoValue();
          if (PSV == nullptr) continue;
          const FixedStackPseudoSourceValue *Value =
            dyn_cast<FixedStackPseudoSourceValue>(PSV);
          if (Value == nullptr) continue;
          if (Value->getFrameIndex() != FI) continue;

          if (MMO->isStore())
            loads = false;
          else
            stores = false;
          break;
        }
        if (loads == stores)
          report("Missing fixed stack memoperand.", MI);
      }
      if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
        report("Instruction loads from dead spill slot", MO, MONum);
        errs() << "Live stack: " << LI << '\n';
      }
      if (stores && !LI.liveAt(Idx.getRegSlot())) {
        report("Instruction stores to dead spill slot", MO, MONum);
        errs() << "Live stack: " << LI << '\n';
      }
    }
    break;

  default:
    break;
  }
}

void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
    unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
    LaneBitmask LaneMask) {
  LiveQueryResult LRQ = LR.Query(UseIdx);
  // Check if we have a segment at the use; note, however, that we only need
  // one live subregister range, the others may be dead.
  if (!LRQ.valueIn() && LaneMask == 0) {
    report("No live segment at use", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    report_context(UseIdx);
  }
  if (MO->isKill() && !LRQ.isKill()) {
    report("Live range continues after kill flag", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    if (LaneMask != 0)
      report_context_lanemask(LaneMask);
    report_context(UseIdx);
  }
}

void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
    unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
    LaneBitmask LaneMask) {
  if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
    assert(VNI && "NULL valno is not allowed");
    if (VNI->def != DefIdx) {
      report("Inconsistent valno->def", MO, MONum);
      report_context_liverange(LR);
      report_context_vreg_regunit(VRegOrUnit);
      if (LaneMask != 0)
        report_context_lanemask(LaneMask);
      report_context(*VNI);
      report_context(DefIdx);
    }
  } else {
    report("No live segment at def", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    if (LaneMask != 0)
      report_context_lanemask(LaneMask);
    report_context(DefIdx);
  }
  // Check that, if the dead def flag is present, LiveInts agrees.
  if (MO->isDead()) {
    LiveQueryResult LRQ = LR.Query(DefIdx);
    if (!LRQ.isDeadDef()) {
      // In case of physregs we can have a non-dead definition on another
      // operand.
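      // Illustration (hypothetical operands): an instruction may carry both
      // "implicit-def dead %cl" and "implicit-def %ecx"; the register unit
      // shared by CL and ECX stays live via the ECX def, so the dead flag on
      // CL is not an error, which is what the scan below detects.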
      bool otherDef = false;
      if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
        const MachineInstr &MI = *MO->getParent();
        for (const MachineOperand &MO : MI.operands()) {
          if (!MO.isReg() || !MO.isDef() || MO.isDead())
            continue;
          unsigned Reg = MO.getReg();
          for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
            if (*Units == VRegOrUnit) {
              otherDef = true;
              break;
            }
          }
        }
      }

      if (!otherDef) {
        report("Live range continues after dead def flag", MO, MONum);
        report_context_liverange(LR);
        report_context_vreg_regunit(VRegOrUnit);
        if (LaneMask != 0)
          report_context_lanemask(LaneMask);
      }
    }
  }
}

void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const unsigned Reg = MO->getReg();

  // Both use and def operands can read a register.
  if (MO->readsReg()) {
    regsLiveInButUnused.erase(Reg);

    if (MO->isKill())
      addRegWithSubRegs(regsKilled, Reg);

    // Check that LiveVars knows this kill.
    if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
        MO->isKill()) {
      LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
      if (!is_contained(VI.Kills, MI))
        report("Kill missing from LiveVariables", MO, MONum);
    }

    // Check LiveInts liveness and kill.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
      // Check the cached regunit intervals.
      if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
        for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
          if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
            checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
        }
      }

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          // This is a virtual register interval.
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);

          if (LI.hasSubRanges() && !MO->isDef()) {
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
                               ? TRI->getSubRegIndexLaneMask(SubRegIdx)
                               : MRI->getMaxLaneMaskForVReg(Reg);
            LaneBitmask LiveInMask = 0;
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((MOMask & SR.LaneMask) == 0)
                continue;
              checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
              LiveQueryResult LRQ = SR.Query(UseIdx);
              if (LRQ.valueIn())
                LiveInMask |= SR.LaneMask;
            }
            // At least parts of the register have to be live at the use.
            if ((LiveInMask & MOMask) == 0) {
              report("No live subrange at use", MO, MONum);
              report_context(LI);
              report_context(UseIdx);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }

    // Use of a dead register.
    if (!regsLive.count(Reg)) {
      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        // Reserved registers may be used even when 'dead'.
        bool Bad = !isReserved(Reg);
        // We are fine if just any subregister has a defined value.
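        // Illustration (hypothetical registers): a read of %ax is accepted
        // here as long as some subregister such as %al is still in regsLive,
        // since a partial definition leaves part of the wider register valid.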
        if (Bad) {
          for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
               ++SubRegs) {
            if (regsLive.count(*SubRegs)) {
              Bad = false;
              break;
            }
          }
        }
        // If there is an additional implicit-use of a super register we stop
        // here. By definition we are fine if the super register is not
        // (completely) dead; if the complete super register is dead we will
        // get a report for its operand.
        if (Bad) {
          for (const MachineOperand &MOP : MI->uses()) {
            if (!MOP.isReg())
              continue;
            if (!MOP.isImplicit())
              continue;
            for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
                 ++SubRegs) {
              if (*SubRegs == Reg) {
                Bad = false;
                break;
              }
            }
          }
        }
        if (Bad)
          report("Using an undefined physical register", MO, MONum);
      } else if (MRI->def_empty(Reg)) {
        report("Reading virtual register without a def", MO, MONum);
      } else {
        BBInfo &MInfo = MBBInfoMap[MI->getParent()];
        // We don't know which virtual registers are live in, so only complain
        // if vreg was killed in this MBB. Otherwise keep track of vregs that
        // must be live in. PHI instructions are handled separately.
        if (MInfo.regsKilled.count(Reg))
          report("Using a killed virtual register", MO, MONum);
        else if (!MI->isPHI())
          MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
      }
    }
  }

  if (MO->isDef()) {
    // Register defined.
    // TODO: verify that earlyclobber ops are not used.
    if (MO->isDead())
      addRegWithSubRegs(regsDead, Reg);
    else
      addRegWithSubRegs(regsDefined, Reg);

    // Verify SSA form.
    if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
        std::next(MRI->def_begin(Reg)) != MRI->def_end())
      report("Multiple virtual register defs in SSA form", MO, MONum);

    // Check LiveInts for a live segment, but only for virtual registers.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
      DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);

          if (LI.hasSubRanges()) {
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
                               ? TRI->getSubRegIndexLaneMask(SubRegIdx)
                               : MRI->getMaxLaneMaskForVReg(Reg);
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((SR.LaneMask & MOMask) == 0)
                continue;
              checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }
  }
}

void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
}

// This function gets called after visiting all instructions in a bundle. The
// argument points to the bundle header.
// Normal stand-alone instructions are also considered 'bundles', and this
// function is called for all of them.
void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
  BBInfo &MInfo = MBBInfoMap[MI->getParent()];
  set_union(MInfo.regsKilled, regsKilled);
  set_subtract(regsLive, regsKilled); regsKilled.clear();
  // Kill any masked registers.
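  // Background (general regmask semantics, not specific to this function): a
  // register mask operand, e.g. on a call, records which physical registers
  // are preserved; clobbersPhysReg() is true for every register whose bit is
  // clear, and those registers are treated as killed here.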
  while (!regMasks.empty()) {
    const uint32_t *Mask = regMasks.pop_back_val();
    for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
      if (TargetRegisterInfo::isPhysicalRegister(*I) &&
          MachineOperand::clobbersPhysReg(Mask, *I))
        regsDead.push_back(*I);
  }
  set_subtract(regsLive, regsDead); regsDead.clear();
  set_union(regsLive, regsDefined); regsDefined.clear();
}

void
MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
  MBBInfoMap[MBB].regsLiveOut = regsLive;
  regsLive.clear();

  if (Indexes) {
    SlotIndex stop = Indexes->getMBBEndIdx(MBB);
    if (!(stop > lastIndex)) {
      report("Block ends before last instruction index", MBB);
      errs() << "Block ends at " << stop
             << " last instruction was at " << lastIndex << '\n';
    }
    lastIndex = stop;
  }
}

// Calculate the largest possible vregsPassed sets. These are the registers
// that can pass through an MBB live, but may not be live every time. It is
// assumed that all vregsPassed sets are empty before the call.
void MachineVerifier::calcRegsPassed() {
  // First push live-out regs to successors' vregsPassed. Remember the MBBs
  // that have any vregsPassed.
  SmallPtrSet<const MachineBasicBlock*, 8> todo;
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    if (!MInfo.reachable)
      continue;
    for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
         SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
      BBInfo &SInfo = MBBInfoMap[*SuI];
      if (SInfo.addPassed(MInfo.regsLiveOut))
        todo.insert(*SuI);
    }
  }

  // Iteratively push vregsPassed to successors. This will converge to the same
  // final state regardless of DenseSet iteration order.
  while (!todo.empty()) {
    const MachineBasicBlock *MBB = *todo.begin();
    todo.erase(MBB);
    BBInfo &MInfo = MBBInfoMap[MBB];
    for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
         SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
      if (*SuI == MBB)
        continue;
      BBInfo &SInfo = MBBInfoMap[*SuI];
      if (SInfo.addPassed(MInfo.vregsPassed))
        todo.insert(*SuI);
    }
  }
}

// Calculate the set of virtual registers that must be passed through each
// basic block in order to satisfy the requirements of successor blocks. This
// is very similar to calcRegsPassed, only backwards.
void MachineVerifier::calcRegsRequired() {
  // First push live-in regs to predecessors' vregsRequired.
  SmallPtrSet<const MachineBasicBlock*, 8> todo;
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
         PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
      BBInfo &PInfo = MBBInfoMap[*PrI];
      if (PInfo.addRequired(MInfo.vregsLiveIn))
        todo.insert(*PrI);
    }
  }

  // Iteratively push vregsRequired to predecessors. This will converge to the
  // same final state regardless of DenseSet iteration order.
  while (!todo.empty()) {
    const MachineBasicBlock *MBB = *todo.begin();
    todo.erase(MBB);
    BBInfo &MInfo = MBBInfoMap[MBB];
    for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
         PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
      if (*PrI == MBB)
        continue;
      BBInfo &SInfo = MBBInfoMap[*PrI];
      if (SInfo.addRequired(MInfo.vregsRequired))
        todo.insert(*PrI);
    }
  }
}

// Check PHI instructions at the beginning of MBB. It is assumed that
// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
  SmallPtrSet<const MachineBasicBlock*, 8> seen;
  for (const auto &BBI : *MBB) {
    if (!BBI.isPHI())
      break;
    seen.clear();

    for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
      unsigned Reg = BBI.getOperand(i).getReg();
      const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
      if (!Pre->isSuccessor(MBB))
        continue;
      seen.insert(Pre);
      BBInfo &PrInfo = MBBInfoMap[Pre];
      if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
        report("PHI operand is not live-out from predecessor",
               &BBI.getOperand(i), i);
    }

    // Did we see all predecessors?
    for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
         PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
      if (!seen.count(*PrI)) {
        report("Missing PHI operand", &BBI);
        errs() << "BB#" << (*PrI)->getNumber()
               << " is a predecessor according to the CFG.\n";
      }
    }
  }
}

void MachineVerifier::visitMachineFunctionAfter() {
  calcRegsPassed();

  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];

    // Skip unreachable MBBs.
    if (!MInfo.reachable)
      continue;

    checkPHIOps(&MBB);
  }

  // Now check liveness info if available
  calcRegsRequired();

  // Check for killed virtual registers that should be live out.
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    for (RegSet::iterator
         I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end();
         I != E; ++I)
      if (MInfo.regsKilled.count(*I)) {
        report("Virtual register killed in block, but needed live out.", &MBB);
        errs() << "Virtual register " << PrintReg(*I)
               << " is used after the block.\n";
      }
  }

  if (!MF->empty()) {
    BBInfo &MInfo = MBBInfoMap[&MF->front()];
    for (RegSet::iterator
         I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end();
         I != E; ++I) {
      report("Virtual register defs don't dominate all uses.", MF);
      report_context_vreg(*I);
    }
  }

  if (LiveVars)
    verifyLiveVariables();
  if (LiveInts)
    verifyLiveIntervals();
}

void MachineVerifier::verifyLiveVariables() {
  assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
    for (const auto &MBB : *MF) {
      BBInfo &MInfo = MBBInfoMap[&MBB];

      // Our vregsRequired should be identical to LiveVariables' AliveBlocks
      if (MInfo.vregsRequired.count(Reg)) {
        if (!VI.AliveBlocks.test(MBB.getNumber())) {
          report("LiveVariables: Block missing from AliveBlocks", &MBB);
          errs() << "Virtual register " << PrintReg(Reg)
                 << " must be live through the block.\n";
        }
      } else {
        if (VI.AliveBlocks.test(MBB.getNumber())) {
          report("LiveVariables: Block should not be in AliveBlocks", &MBB);
          errs() << "Virtual register " << PrintReg(Reg)
                 << " is not needed live through the block.\n";
        }
      }
    }
  }
}

void MachineVerifier::verifyLiveIntervals() {
  assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);

    // Spilling and splitting may leave unused registers around. Skip them.
    if (MRI->reg_nodbg_empty(Reg))
      continue;

    if (!LiveInts->hasInterval(Reg)) {
      report("Missing live interval for virtual register", MF);
      errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
      continue;
    }

    const LiveInterval &LI = LiveInts->getInterval(Reg);
    assert(Reg == LI.reg && "Invalid reg to interval mapping");
    verifyLiveInterval(LI);
  }

  // Verify all the cached regunit intervals.
  for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
    if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
      verifyLiveRange(*LR, i);
}

void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
                                           const VNInfo *VNI, unsigned Reg,
                                           LaneBitmask LaneMask) {
  if (VNI->isUnused())
    return;

  const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);

  if (!DefVNI) {
    report("Value not live at VNInfo def and not marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (DefVNI != VNI) {
    report("Live segment at def has different VNInfo", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
  if (!MBB) {
    report("Invalid VNInfo definition index", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (VNI->isPHIDef()) {
    if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
      report("PHIDef VNInfo is not defined at MBB start", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
    return;
  }

  // Non-PHI def.
  const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
  if (!MI) {
    report("No instruction at VNInfo def index", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (Reg != 0) {
    bool hasDef = false;
    bool isEarlyClobber = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || !MOI->isDef())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (MOI->getReg() != Reg)
          continue;
      } else {
        if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
            !TRI->hasRegUnit(MOI->getReg(), Reg))
          continue;
      }
      if (LaneMask != 0 &&
          (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask) == 0)
        continue;
      hasDef = true;
      if (MOI->isEarlyClobber())
        isEarlyClobber = true;
    }

    if (!hasDef) {
      report("Defining instruction does not modify register", MI);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }

    // Early clobber defs begin at USE slots, but other defs must begin at
    // DEF slots.
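    // (SlotIndexes gives each instruction four slots -- block, early-clobber,
    //  register, and dead -- so an early-clobber def starts one slot earlier
    //  than a normal def and therefore interferes with the same instruction's
    //  uses.)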
    if (isEarlyClobber) {
      if (!VNI->def.isEarlyClobber()) {
        report("Early clobber def must be at an early-clobber slot", MBB);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
      }
    } else if (!VNI->def.isRegister()) {
      report("Non-PHI, non-early clobber def must be at a register slot", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
  }
}

void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
                                             const LiveRange::const_iterator I,
                                             unsigned Reg, LaneBitmask LaneMask)
{
  const LiveRange::Segment &S = *I;
  const VNInfo *VNI = S.valno;
  assert(VNI && "Live segment has no valno");

  if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
    report("Foreign valno in live segment", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    report_context(*VNI);
  }

  if (VNI->isUnused()) {
    report("Live segment valno is marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
  if (!MBB) {
    report("Bad start of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }
  SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
  if (S.start != MBBStartIdx && S.start != VNI->def) {
    report("Live segment must begin at MBB entry or valno def", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *EndMBB =
    LiveInts->getMBBFromIndex(S.end.getPrevSlot());
  if (!EndMBB) {
    report("Bad end of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // No more checks for live-out segments.
  if (S.end == LiveInts->getMBBEndIdx(EndMBB))
    return;

  // RegUnit intervals are allowed dead phis.
  if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
      S.start == VNI->def && S.end == VNI->def.getDeadSlot())
    return;

  // The live segment is ending inside EndMBB
  const MachineInstr *MI =
    LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
  if (!MI) {
    report("Live segment doesn't end at a valid instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // The block slot must refer to a basic block boundary.
  if (S.end.isBlock()) {
    report("Live segment ends at B slot of an instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  if (S.end.isDead()) {
    // Segment ends on the dead slot.
    // That means there must be a dead def.
    if (!SlotIndex::isSameInstr(S.start, S.end)) {
      report("Live segment ending at dead slot spans instructions", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // A live segment can only end at an early-clobber slot if it is being
  // redefined by an early-clobber def.
  if (S.end.isEarlyClobber()) {
    if (I+1 == LR.end() || (I+1)->start != S.end) {
      report("Live segment ending at early clobber slot must be "
             "redefined by an EC def in the same instruction", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // The following checks only apply to virtual registers. Physreg liveness
  // is too weird to check.
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    // A live segment can end with either a redefinition, a kill flag on a
    // use, or a dead flag on a def.
    bool hasRead = false;
    bool hasSubRegDef = false;
    bool hasDeadDef = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || MOI->getReg() != Reg)
        continue;
      unsigned Sub = MOI->getSubReg();
      LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub) : ~0U;
      if (MOI->isDef()) {
        if (Sub != 0) {
          hasSubRegDef = true;
          // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
          // mask for subregister defs. Read-undef defs will be handled by
          // readsReg below.
          SLM = ~SLM;
        }
        if (MOI->isDead())
          hasDeadDef = true;
      }
      if (LaneMask != 0 && !(LaneMask & SLM))
        continue;
      if (MOI->readsReg())
        hasRead = true;
    }
    if (S.end.isDead()) {
      // Make sure that the corresponding machine operand for a "dead" live
      // range has the dead flag. We cannot perform this check for subregister
      // liveranges as partially dead values are allowed.
      if (LaneMask == 0 && !hasDeadDef) {
        report("Instruction ending live segment on dead slot has no dead flag",
               MI);
        report_context(LR, Reg, LaneMask);
        report_context(S);
      }
    } else {
      if (!hasRead) {
        // When tracking subregister liveness, the main range must start new
        // values on partial register writes, even if there is no read.
        if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask != 0 ||
            !hasSubRegDef) {
          report("Instruction ending live segment doesn't read the register",
                 MI);
          report_context(LR, Reg, LaneMask);
          report_context(S);
        }
      }
    }
  }

  // Now check all the basic blocks in this live segment.
  MachineFunction::const_iterator MFI = MBB->getIterator();
  // Is this live segment the beginning of a non-PHIDef VN?
  if (S.start == VNI->def && !VNI->isPHIDef()) {
    // Not live-in to any blocks.
    if (MBB == EndMBB)
      return;
    // Skip this block.
    ++MFI;
  }
  for (;;) {
    assert(LiveInts->isLiveInToMBB(LR, &*MFI));
    // We don't know how to track physregs into a landing pad.
    if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
        MFI->isEHPad()) {
      if (&*MFI == EndMBB)
        break;
      ++MFI;
      continue;
    }

    // Is VNI a PHI-def in the current block?
    bool IsPHI = VNI->isPHIDef() &&
      VNI->def == LiveInts->getMBBStartIdx(&*MFI);

    // Check that VNI is live-out of all predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
           PE = MFI->pred_end(); PI != PE; ++PI) {
      SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
      const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);

      // All predecessors must have a live-out value if this is not a
      // subregister liverange.
      if (!PVNI && LaneMask == 0) {
        report("Register not marked live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
        errs() << " live into BB#" << MFI->getNumber()
               << '@' << LiveInts->getMBBStartIdx(&*MFI) << ", not live before "
               << PEnd << '\n';
        continue;
      }

      // Only PHI-defs can take different predecessor values.
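      // (A non-PHI value live into this block must be the exact value number
      //  that is live out of every predecessor; only a PHI-def at the block
      //  start is allowed to merge distinct incoming values.)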
      if (!IsPHI && PVNI != VNI) {
        report("Different value live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        errs() << "Valno #" << PVNI->id << " live out of BB#"
               << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
               << " live into BB#" << MFI->getNumber() << '@'
               << LiveInts->getMBBStartIdx(&*MFI) << '\n';
      }
    }
    if (&*MFI == EndMBB)
      break;
    ++MFI;
  }
}

void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
                                      LaneBitmask LaneMask) {
  for (const VNInfo *VNI : LR.valnos)
    verifyLiveRangeValue(LR, VNI, Reg, LaneMask);

  for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
    verifyLiveRangeSegment(LR, I, Reg, LaneMask);
}

void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
  unsigned Reg = LI.reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg));
  verifyLiveRange(LI, Reg);

  LaneBitmask Mask = 0;
  LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((Mask & SR.LaneMask) != 0) {
      report("Lane masks of sub ranges overlap in live interval", MF);
      report_context(LI);
    }
    if ((SR.LaneMask & ~MaxMask) != 0) {
      report("Subrange lanemask is invalid", MF);
      report_context(LI);
    }
    if (SR.empty()) {
      report("Subrange must not be empty", MF);
      report_context(SR, LI.reg, SR.LaneMask);
    }
    Mask |= SR.LaneMask;
    verifyLiveRange(SR, LI.reg, SR.LaneMask);
    if (!LI.covers(SR)) {
      report("A Subrange is not covered by the main range", MF);
      report_context(LI);
    }
  }

  // Check that the LI has only one connected component.
  ConnectedVNInfoEqClasses ConEQ(*LiveInts);
  unsigned NumComp = ConEQ.Classify(LI);
  if (NumComp > 1) {
    report("Multiple connected components in live interval", MF);
    report_context(LI);
    for (unsigned comp = 0; comp != NumComp; ++comp) {
      errs() << comp << ": valnos";
      for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
             E = LI.vni_end(); I != E; ++I)
        if (comp == ConEQ.getEqClass(*I))
          errs() << ' ' << (*I)->id;
      errs() << '\n';
    }
  }
}

namespace {
// FrameSetup and FrameDestroy can both have a zero adjustment, so a single
// integer cannot record whether the most recent adjustment seen was a
// FrameSetup or a FrameDestroy when the value is zero.
// We therefore use a bool plus an integer to capture the stack state.
struct StackStateOfBB {
  StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
      ExitIsSetup(false) { }
  StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
      EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
      ExitIsSetup(ExitSetup) { }
  // Can be negative, which means we are setting up a frame.
  int EntryValue;
  int ExitValue;
  bool EntryIsSetup;
  bool ExitIsSetup;
};
}

/// Make sure that, on every path through the CFG, a FrameSetup <n> is always
/// followed by a FrameDestroy <n>, that stack adjustments are identical on all
/// CFG edges to a merge point, and that the frame is destroyed at the end of a
/// return block.
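///
/// For example, with the call-frame pseudos (commonly named ADJCALLSTACKDOWN
/// and ADJCALLSTACKUP, though the opcodes are target-defined), a well-formed
/// call sequence adjusts the stack down by <n> before the call and back up by
/// <n> after it, so the net adjustment tracked here returns to zero on the way
/// to any return block.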
void MachineVerifier::verifyStackFrame() {
  unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
  unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();

  SmallVector<StackStateOfBB, 8> SPState;
  SPState.resize(MF->getNumBlockIDs());
  SmallPtrSet<const MachineBasicBlock*, 8> Reachable;

  // Visit the MBBs in DFS order.
  for (df_ext_iterator<const MachineFunction*,
                       SmallPtrSet<const MachineBasicBlock*, 8> >
       DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
       DFI != DFE; ++DFI) {
    const MachineBasicBlock *MBB = *DFI;

    StackStateOfBB BBState;
    // Check the exit state of the DFS stack predecessor.
    if (DFI.getPathLength() >= 2) {
      const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
      assert(Reachable.count(StackPred) &&
             "DFS stack predecessor is already visited.\n");
      BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
      BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
      BBState.ExitValue = BBState.EntryValue;
      BBState.ExitIsSetup = BBState.EntryIsSetup;
    }

    // Update stack state by checking contents of MBB.
    for (const auto &I : *MBB) {
      if (I.getOpcode() == FrameSetupOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I.getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (BBState.ExitIsSetup)
          report("FrameSetup is after another FrameSetup", &I);
        BBState.ExitValue -= Size;
        BBState.ExitIsSetup = true;
      }

      if (I.getOpcode() == FrameDestroyOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I.getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (!BBState.ExitIsSetup)
          report("FrameDestroy is not after a FrameSetup", &I);
        int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
                                               BBState.ExitValue;
        if (BBState.ExitIsSetup && AbsSPAdj != Size) {
          report("FrameDestroy <n> is after FrameSetup <m>", &I);
          errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
                 << AbsSPAdj << ">.\n";
        }
        BBState.ExitValue += Size;
        BBState.ExitIsSetup = false;
      }
    }
    SPState[MBB->getNumber()] = BBState;

    // Make sure the exit state of any predecessor is consistent with the entry
    // state.
    for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
         E = MBB->pred_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
           SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
        report("The exit stack state of a predecessor is inconsistent.", MBB);
        errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
               << SPState[(*I)->getNumber()].ExitValue << ", "
               << SPState[(*I)->getNumber()].ExitIsSetup
               << "), while BB#" << MBB->getNumber() << " has entry state ("
               << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
      }
    }

    // Make sure the entry state of any successor is consistent with the exit
    // state.
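    // (Only successors already visited by the DFS have state in SPState;
    //  not-yet-visited successors are skipped here and their edge is checked
    //  from the successor's own predecessor loop once it is visited, so every
    //  edge between reachable blocks is verified.)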
    for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
         E = MBB->succ_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
           SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
        report("The entry stack state of a successor is inconsistent.", MBB);
        errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
               << SPState[(*I)->getNumber()].EntryValue << ", "
               << SPState[(*I)->getNumber()].EntryIsSetup
               << "), while BB#" << MBB->getNumber() << " has exit state ("
               << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
      }
    }

    // Make sure a basic block with return ends with zero stack adjustment.
    if (!MBB->empty() && MBB->back().isReturn()) {
      if (BBState.ExitIsSetup)
        report("A return block ends with a FrameSetup.", MBB);
      if (BBState.ExitValue)
        report("A return block ends with a nonzero stack adjustment.", MBB);
    }
  }
}