//===-- MachineVerifier.cpp - Machine Code Verifier ----------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Pass to verify generated machine code. The following is checked:
//
// Operand counts: All explicit operands must be present.
//
// Register classes: All physical and virtual register operands must be
// compatible with the register class required by the instruction descriptor.
//
// Register live intervals: Registers must be defined only once, and must be
// defined before use.
//
// The machine code verifier is enabled from LLVMTargetMachine.cpp with the
// command-line option -verify-machineinstrs, or by defining the environment
// variable LLVM_VERIFY_MACHINEINSTRS to the name of a file that will receive
// the verifier errors.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SetOperations.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/LiveVariables.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

namespace {
  struct MachineVerifier {

    MachineVerifier(Pass *pass, const char *b) :
      PASS(pass),
      Banner(b)
      {}

    unsigned verify(MachineFunction &MF);

    Pass *const PASS;
    const char *Banner;
    const MachineFunction *MF;
    const TargetMachine *TM;
    const TargetInstrInfo *TII;
    const TargetRegisterInfo *TRI;
    const MachineRegisterInfo *MRI;

    unsigned foundErrors;

    // Avoid querying the MachineFunctionProperties for each operand.
    bool isFunctionRegBankSelected;
    bool isFunctionSelected;

    typedef SmallVector<unsigned, 16> RegVector;
    typedef SmallVector<const uint32_t*, 4> RegMaskVector;
    typedef DenseSet<unsigned> RegSet;
    typedef DenseMap<unsigned, const MachineInstr*> RegMap;
    typedef SmallPtrSet<const MachineBasicBlock*, 8> BlockSet;

    const MachineInstr *FirstTerminator;
    BlockSet FunctionBlocks;

    BitVector regsReserved;
    RegSet regsLive;
    RegVector regsDefined, regsDead, regsKilled;
    RegMaskVector regMasks;
    RegSet regsLiveInButUnused;

    SlotIndex lastIndex;

    // Add Reg and any sub-registers to RV
    void addRegWithSubRegs(RegVector &RV, unsigned Reg) {
      RV.push_back(Reg);
      if (TargetRegisterInfo::isPhysicalRegister(Reg))
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          RV.push_back(*SubRegs);
    }

    struct BBInfo {
      // Is this MBB reachable from the MF entry point?
      bool reachable;

      // Vregs that must be live in because they are used without being
      // defined. Map value is the user.
      RegMap vregsLiveIn;

      // Regs killed in MBB. They may be defined again, and will then be in both
      // regsKilled and regsLiveOut.
      RegSet regsKilled;

      // Regs defined in MBB and live out. Note that vregs passing through may
      // be live out without being mentioned here.
      RegSet regsLiveOut;

      // Vregs that pass through MBB untouched. This set is disjoint from
      // regsKilled and regsLiveOut.
      RegSet vregsPassed;

      // Vregs that must pass through MBB because they are needed by a successor
      // block. This set is disjoint from regsLiveOut.
      RegSet vregsRequired;

      // Set versions of block's predecessor and successor lists.
      BlockSet Preds, Succs;

      BBInfo() : reachable(false) {}

      // Add register to vregsPassed if it belongs there. Return true if
      // anything changed.
      bool addPassed(unsigned Reg) {
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          return false;
        if (regsKilled.count(Reg) || regsLiveOut.count(Reg))
          return false;
        return vregsPassed.insert(Reg).second;
      }

      // Same for a full set.
      bool addPassed(const RegSet &RS) {
        bool changed = false;
        for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
          if (addPassed(*I))
            changed = true;
        return changed;
      }

      // Add register to vregsRequired if it belongs there. Return true if
      // anything changed.
      bool addRequired(unsigned Reg) {
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          return false;
        if (regsLiveOut.count(Reg))
          return false;
        return vregsRequired.insert(Reg).second;
      }

      // Same for a full set.
      bool addRequired(const RegSet &RS) {
        bool changed = false;
        for (RegSet::const_iterator I = RS.begin(), E = RS.end(); I != E; ++I)
          if (addRequired(*I))
            changed = true;
        return changed;
      }

      // Same for a full map.
      bool addRequired(const RegMap &RM) {
        bool changed = false;
        for (RegMap::const_iterator I = RM.begin(), E = RM.end(); I != E; ++I)
          if (addRequired(I->first))
            changed = true;
        return changed;
      }

      // Live-out registers are either in regsLiveOut or vregsPassed.
      bool isLiveOut(unsigned Reg) const {
        return regsLiveOut.count(Reg) || vregsPassed.count(Reg);
      }
    };

    // Extra register info per MBB.
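    // BBInfo entries are created on demand; the vregsPassed and vregsRequired
    // sets are only filled in by calcRegsPassed() and calcRegsRequired() after
    // all blocks have been visited.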
    DenseMap<const MachineBasicBlock*, BBInfo> MBBInfoMap;

    bool isReserved(unsigned Reg) {
      return Reg < regsReserved.size() && regsReserved.test(Reg);
    }

    bool isAllocatable(unsigned Reg) {
      return Reg < TRI->getNumRegs() && MRI->isAllocatable(Reg);
    }

    // Analysis information if available
    LiveVariables *LiveVars;
    LiveIntervals *LiveInts;
    LiveStacks *LiveStks;
    SlotIndexes *Indexes;

    void visitMachineFunctionBefore();
    void visitMachineBasicBlockBefore(const MachineBasicBlock *MBB);
    void visitMachineBundleBefore(const MachineInstr *MI);
    void visitMachineInstrBefore(const MachineInstr *MI);
    void visitMachineOperand(const MachineOperand *MO, unsigned MONum);
    void visitMachineInstrAfter(const MachineInstr *MI);
    void visitMachineBundleAfter(const MachineInstr *MI);
    void visitMachineBasicBlockAfter(const MachineBasicBlock *MBB);
    void visitMachineFunctionAfter();

    void report(const char *msg, const MachineFunction *MF);
    void report(const char *msg, const MachineBasicBlock *MBB);
    void report(const char *msg, const MachineInstr *MI);
    void report(const char *msg, const MachineOperand *MO, unsigned MONum);

    void report_context(const LiveInterval &LI) const;
    void report_context(const LiveRange &LR, unsigned VRegUnit,
                        LaneBitmask LaneMask) const;
    void report_context(const LiveRange::Segment &S) const;
    void report_context(const VNInfo &VNI) const;
    void report_context(SlotIndex Pos) const;
    void report_context_liverange(const LiveRange &LR) const;
    void report_context_lanemask(LaneBitmask LaneMask) const;
    void report_context_vreg(unsigned VReg) const;
    void report_context_vreg_regunit(unsigned VRegOrRegUnit) const;

    void verifyInlineAsm(const MachineInstr *MI);

    void checkLiveness(const MachineOperand *MO, unsigned MONum);
    void checkLivenessAtUse(const MachineOperand *MO, unsigned MONum,
                            SlotIndex UseIdx, const LiveRange &LR, unsigned Reg,
                            LaneBitmask LaneMask = 0);
    void checkLivenessAtDef(const MachineOperand *MO, unsigned MONum,
                            SlotIndex DefIdx, const LiveRange &LR, unsigned Reg,
                            LaneBitmask LaneMask = 0);

    void markReachable(const MachineBasicBlock *MBB);
    void calcRegsPassed();
    void checkPHIOps(const MachineBasicBlock *MBB);

    void calcRegsRequired();
    void verifyLiveVariables();
    void verifyLiveIntervals();
    void verifyLiveInterval(const LiveInterval&);
    void verifyLiveRangeValue(const LiveRange&, const VNInfo*, unsigned,
                              unsigned);
    void verifyLiveRangeSegment(const LiveRange&,
                                const LiveRange::const_iterator I, unsigned,
                                unsigned);
    void verifyLiveRange(const LiveRange&, unsigned, LaneBitmask LaneMask = 0);

    void verifyStackFrame();

    void verifySlotIndexes() const;
    void verifyProperties(const MachineFunction &MF);
  };

  struct MachineVerifierPass : public MachineFunctionPass {
    static char ID; // Pass ID, replacement for typeid
    const std::string Banner;

    MachineVerifierPass(const std::string &banner = "")
      : MachineFunctionPass(ID), Banner(banner) {
        initializeMachineVerifierPassPass(*PassRegistry::getPassRegistry());
      }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesAll();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    bool runOnMachineFunction(MachineFunction &MF) override {
      unsigned FoundErrors = MachineVerifier(this, Banner.c_str()).verify(MF);
      if (FoundErrors)
        report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
      return false;
    }
  };

}

char MachineVerifierPass::ID = 0;
INITIALIZE_PASS(MachineVerifierPass, "machineverifier",
                "Verify generated machine code", false, false)

FunctionPass *llvm::createMachineVerifierPass(const std::string &Banner) {
  return new MachineVerifierPass(Banner);
}

bool MachineFunction::verify(Pass *p, const char *Banner, bool AbortOnErrors)
    const {
  MachineFunction &MF = const_cast<MachineFunction&>(*this);
  unsigned FoundErrors = MachineVerifier(p, Banner).verify(MF);
  if (AbortOnErrors && FoundErrors)
    report_fatal_error("Found "+Twine(FoundErrors)+" machine code errors.");
  return FoundErrors == 0;
}

void MachineVerifier::verifySlotIndexes() const {
  if (Indexes == nullptr)
    return;

  // Ensure the IdxMBB list is sorted by slot indexes.
  SlotIndex Last;
  for (SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin(),
       E = Indexes->MBBIndexEnd(); I != E; ++I) {
    assert(!Last.isValid() || I->first > Last);
    Last = I->first;
  }
}

void MachineVerifier::verifyProperties(const MachineFunction &MF) {
  // If a pass has introduced virtual registers without clearing the
  // NoVRegs property (or set it without allocating the vregs)
  // then report an error.
  if (MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::NoVRegs) &&
      MRI->getNumVirtRegs())
    report("Function has NoVRegs property but there are VReg operands", &MF);
}

unsigned MachineVerifier::verify(MachineFunction &MF) {
  foundErrors = 0;

  this->MF = &MF;
  TM = &MF.getTarget();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  isFunctionRegBankSelected = MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::RegBankSelected);
  isFunctionSelected = MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::Selected);

  LiveVars = nullptr;
  LiveInts = nullptr;
  LiveStks = nullptr;
  Indexes = nullptr;
  if (PASS) {
    LiveInts = PASS->getAnalysisIfAvailable<LiveIntervals>();
    // We don't want to verify LiveVariables if LiveIntervals is available.
    if (!LiveInts)
      LiveVars = PASS->getAnalysisIfAvailable<LiveVariables>();
    LiveStks = PASS->getAnalysisIfAvailable<LiveStacks>();
    Indexes = PASS->getAnalysisIfAvailable<SlotIndexes>();
  }

  verifySlotIndexes();

  verifyProperties(MF);

  visitMachineFunctionBefore();
  for (MachineFunction::const_iterator MFI = MF.begin(), MFE = MF.end();
       MFI != MFE; ++MFI) {
    visitMachineBasicBlockBefore(&*MFI);
    // Keep track of the current bundle header.
    const MachineInstr *CurBundle = nullptr;
    // Do we expect the next instruction to be part of the same bundle?
    bool InBundle = false;

    for (MachineBasicBlock::const_instr_iterator MBBI = MFI->instr_begin(),
         MBBE = MFI->instr_end(); MBBI != MBBE; ++MBBI) {
      if (MBBI->getParent() != &*MFI) {
        report("Bad instruction parent pointer", &*MFI);
        errs() << "Instruction: " << *MBBI;
        continue;
      }

      // Check for consistent bundle flags.
      if (InBundle && !MBBI->isBundledWithPred())
        report("Missing BundledPred flag, "
               "BundledSucc was set on predecessor",
               &*MBBI);
      if (!InBundle && MBBI->isBundledWithPred())
        report("BundledPred flag is set, "
               "but BundledSucc not set on predecessor",
               &*MBBI);

      // Is this a bundle header?
      if (!MBBI->isInsideBundle()) {
        if (CurBundle)
          visitMachineBundleAfter(CurBundle);
        CurBundle = &*MBBI;
        visitMachineBundleBefore(CurBundle);
      } else if (!CurBundle)
        report("No bundle header", &*MBBI);
      visitMachineInstrBefore(&*MBBI);
      for (unsigned I = 0, E = MBBI->getNumOperands(); I != E; ++I) {
        const MachineInstr &MI = *MBBI;
        const MachineOperand &Op = MI.getOperand(I);
        if (Op.getParent() != &MI) {
          // Make sure to use correct addOperand / RemoveOperand / ChangeTo
          // functions when replacing operands of a MachineInstr.
          report("Instruction has operand with wrong parent set", &MI);
        }

        visitMachineOperand(&Op, I);
      }

      visitMachineInstrAfter(&*MBBI);

      // Was this the last bundled instruction?
      InBundle = MBBI->isBundledWithSucc();
    }
    if (CurBundle)
      visitMachineBundleAfter(CurBundle);
    if (InBundle)
      report("BundledSucc flag set on last instruction in block", &MFI->back());
    visitMachineBasicBlockAfter(&*MFI);
  }
  visitMachineFunctionAfter();

  // Clean up.
  regsLive.clear();
  regsDefined.clear();
  regsDead.clear();
  regsKilled.clear();
  regMasks.clear();
  regsLiveInButUnused.clear();
  MBBInfoMap.clear();

  return foundErrors;
}

void MachineVerifier::report(const char *msg, const MachineFunction *MF) {
  assert(MF);
  errs() << '\n';
  if (!foundErrors++) {
    if (Banner)
      errs() << "# " << Banner << '\n';
    if (LiveInts != nullptr)
      LiveInts->print(errs());
    else
      MF->print(errs(), Indexes);
  }
  errs() << "*** Bad machine code: " << msg << " ***\n"
         << "- function: " << MF->getName() << "\n";
}

void MachineVerifier::report(const char *msg, const MachineBasicBlock *MBB) {
  assert(MBB);
  report(msg, MBB->getParent());
  errs() << "- basic block: BB#" << MBB->getNumber()
         << ' ' << MBB->getName()
         << " (" << (const void*)MBB << ')';
  if (Indexes)
    errs() << " [" << Indexes->getMBBStartIdx(MBB)
           << ';' << Indexes->getMBBEndIdx(MBB) << ')';
  errs() << '\n';
}

void MachineVerifier::report(const char *msg, const MachineInstr *MI) {
  assert(MI);
  report(msg, MI->getParent());
  errs() << "- instruction: ";
  if (Indexes && Indexes->hasIndex(*MI))
    errs() << Indexes->getInstructionIndex(*MI) << '\t';
  MI->print(errs(), /*SkipOpers=*/true);
  errs() << '\n';
}

void MachineVerifier::report(const char *msg,
                             const MachineOperand *MO, unsigned MONum) {
  assert(MO);
  report(msg, MO->getParent());
  errs() << "- operand " << MONum << ": ";
  MO->print(errs(), TRI);
  errs() << "\n";
}

void MachineVerifier::report_context(SlotIndex Pos) const {
  errs() << "- at: " << Pos << '\n';
}

void MachineVerifier::report_context(const LiveInterval &LI) const {
  errs() << "- interval: " << LI << '\n';
}

void MachineVerifier::report_context(const LiveRange &LR, unsigned VRegUnit,
                                     LaneBitmask LaneMask) const {
  report_context_liverange(LR);
  report_context_vreg_regunit(VRegUnit);
  if (LaneMask != 0)
    report_context_lanemask(LaneMask);
}

void MachineVerifier::report_context(const LiveRange::Segment &S) const {
  errs() << "- segment: " << S << '\n';
}

void MachineVerifier::report_context(const VNInfo &VNI) const {
  errs() << "- ValNo: " << VNI.id << " (def " << VNI.def << ")\n";
}

void MachineVerifier::report_context_liverange(const LiveRange &LR) const {
  errs() << "- liverange: " << LR << '\n';
}

void MachineVerifier::report_context_vreg(unsigned VReg) const {
  errs() << "- v. register: " << PrintReg(VReg, TRI) << '\n';
}

void MachineVerifier::report_context_vreg_regunit(unsigned VRegOrUnit) const {
  if (TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
    report_context_vreg(VRegOrUnit);
  } else {
    errs() << "- regunit: " << PrintRegUnit(VRegOrUnit, TRI) << '\n';
  }
}

void MachineVerifier::report_context_lanemask(LaneBitmask LaneMask) const {
  errs() << "- lanemask: " << PrintLaneMask(LaneMask) << '\n';
}

void MachineVerifier::markReachable(const MachineBasicBlock *MBB) {
  BBInfo &MInfo = MBBInfoMap[MBB];
  if (!MInfo.reachable) {
    MInfo.reachable = true;
    for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
         SuE = MBB->succ_end(); SuI != SuE; ++SuI)
      markReachable(*SuI);
  }
}

void MachineVerifier::visitMachineFunctionBefore() {
  lastIndex = SlotIndex();
  regsReserved = MRI->getReservedRegs();

  markReachable(&MF->front());

  // Build a set of the basic blocks in the function.
  FunctionBlocks.clear();
  for (const auto &MBB : *MF) {
    FunctionBlocks.insert(&MBB);
    BBInfo &MInfo = MBBInfoMap[&MBB];

    MInfo.Preds.insert(MBB.pred_begin(), MBB.pred_end());
    if (MInfo.Preds.size() != MBB.pred_size())
      report("MBB has duplicate entries in its predecessor list.", &MBB);

    MInfo.Succs.insert(MBB.succ_begin(), MBB.succ_end());
    if (MInfo.Succs.size() != MBB.succ_size())
      report("MBB has duplicate entries in its successor list.", &MBB);
  }

  // Check that the register use lists are sane.
  MRI->verifyUseLists();

  verifyStackFrame();
}

// Does iterator point to a and b as the first two elements?
static bool matchPair(MachineBasicBlock::const_succ_iterator i,
                      const MachineBasicBlock *a, const MachineBasicBlock *b) {
  if (*i == a)
    return *++i == b;
  if (*i == b)
    return *++i == a;
  return false;
}

void
MachineVerifier::visitMachineBasicBlockBefore(const MachineBasicBlock *MBB) {
  FirstTerminator = nullptr;

  if (!MF->getProperties().hasProperty(
          MachineFunctionProperties::Property::NoPHIs)) {
    // If this block has allocatable physical registers live-in, check that
    // it is an entry block or landing pad.
    for (const auto &LI : MBB->liveins()) {
      if (isAllocatable(LI.PhysReg) && !MBB->isEHPad() &&
          MBB->getIterator() != MBB->getParent()->begin()) {
        report("MBB has allocatable live-in, but isn't entry or landing-pad.",
               MBB);
      }
    }
  }

  // Count the number of landing pad successors.
  SmallPtrSet<MachineBasicBlock*, 4> LandingPadSuccs;
  for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
       E = MBB->succ_end(); I != E; ++I) {
    if ((*I)->isEHPad())
      LandingPadSuccs.insert(*I);
    if (!FunctionBlocks.count(*I))
      report("MBB has successor that isn't part of the function.", MBB);
    if (!MBBInfoMap[*I].Preds.count(MBB)) {
      report("Inconsistent CFG", MBB);
      errs() << "MBB is not in the predecessor list of the successor BB#"
             << (*I)->getNumber() << ".\n";
    }
  }

  // Check the predecessor list.
  for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
       E = MBB->pred_end(); I != E; ++I) {
    if (!FunctionBlocks.count(*I))
      report("MBB has predecessor that isn't part of the function.", MBB);
    if (!MBBInfoMap[*I].Succs.count(MBB)) {
      report("Inconsistent CFG", MBB);
      errs() << "MBB is not in the successor list of the predecessor BB#"
             << (*I)->getNumber() << ".\n";
    }
  }

  const MCAsmInfo *AsmInfo = TM->getMCAsmInfo();
  const BasicBlock *BB = MBB->getBasicBlock();
  const Function *Fn = MF->getFunction();
  if (LandingPadSuccs.size() > 1 &&
      !(AsmInfo &&
        AsmInfo->getExceptionHandlingType() == ExceptionHandling::SjLj &&
        BB && isa<SwitchInst>(BB->getTerminator())) &&
      !isFuncletEHPersonality(classifyEHPersonality(Fn->getPersonalityFn())))
    report("MBB has more than one landing pad successor", MBB);

  // Call AnalyzeBranch. If it succeeds, there are several more conditions
  // to check.
  MachineBasicBlock *TBB = nullptr, *FBB = nullptr;
  SmallVector<MachineOperand, 4> Cond;
  if (!TII->analyzeBranch(*const_cast<MachineBasicBlock *>(MBB), TBB, FBB,
                          Cond)) {
    // Ok, AnalyzeBranch thinks it knows what's going on with this block. Let's
    // check whether its answers match up with reality.
    if (!TBB && !FBB) {
      // Block falls through to its successor.
      MachineFunction::const_iterator MBBI = MBB->getIterator();
      ++MBBI;
      if (MBBI == MF->end()) {
        // It's possible that the block legitimately ends with a noreturn
        // call or an unreachable, in which case it won't actually fall
        // out the bottom of the function.
      } else if (MBB->succ_size() == LandingPadSuccs.size()) {
        // It's possible that the block legitimately ends with a noreturn
        // call or an unreachable, in which case it won't actually fall
        // out of the block.
      } else if (MBB->succ_size() != 1+LandingPadSuccs.size()) {
        report("MBB exits via unconditional fall-through but doesn't have "
               "exactly one CFG successor!", MBB);
      } else if (!MBB->isSuccessor(&*MBBI)) {
        report("MBB exits via unconditional fall-through but its successor "
               "differs from its CFG successor!", MBB);
      }
      if (!MBB->empty() && MBB->back().isBarrier() &&
          !TII->isPredicated(MBB->back())) {
        report("MBB exits via unconditional fall-through but ends with a "
               "barrier instruction!", MBB);
      }
      if (!Cond.empty()) {
        report("MBB exits via unconditional fall-through but has a condition!",
               MBB);
      }
    } else if (TBB && !FBB && Cond.empty()) {
      // Block unconditionally branches somewhere.
      // If the block has exactly one successor that happens to be a
      // landing pad, accept it as valid control flow.
      if (MBB->succ_size() != 1+LandingPadSuccs.size() &&
          (MBB->succ_size() != 1 || LandingPadSuccs.size() != 1 ||
           *MBB->succ_begin() != *LandingPadSuccs.begin())) {
        report("MBB exits via unconditional branch but doesn't have "
               "exactly one CFG successor!", MBB);
      } else if (!MBB->isSuccessor(TBB)) {
        report("MBB exits via unconditional branch but the CFG "
               "successor doesn't match the actual successor!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via unconditional branch but doesn't contain "
               "any instructions!", MBB);
      } else if (!MBB->back().isBarrier()) {
        report("MBB exits via unconditional branch but doesn't end with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via unconditional branch but the branch isn't a "
               "terminator instruction!", MBB);
      }
    } else if (TBB && !FBB && !Cond.empty()) {
      // Block conditionally branches somewhere, otherwise falls through.
      MachineFunction::const_iterator MBBI = MBB->getIterator();
      ++MBBI;
      if (MBBI == MF->end()) {
        report("MBB conditionally falls through out of function!", MBB);
      } else if (MBB->succ_size() == 1) {
        // A conditional branch with only one successor is weird, but allowed.
        if (&*MBBI != TBB)
          report("MBB exits via conditional branch/fall-through but only has "
                 "one CFG successor!", MBB);
        else if (TBB != *MBB->succ_begin())
          report("MBB exits via conditional branch/fall-through but the CFG "
                 "successor doesn't match the actual successor!", MBB);
      } else if (MBB->succ_size() != 2) {
        report("MBB exits via conditional branch/fall-through but doesn't have "
               "exactly two CFG successors!", MBB);
      } else if (!matchPair(MBB->succ_begin(), TBB, &*MBBI)) {
        report("MBB exits via conditional branch/fall-through but the CFG "
               "successors don't match the actual successors!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via conditional branch/fall-through but doesn't "
               "contain any instructions!", MBB);
      } else if (MBB->back().isBarrier()) {
        report("MBB exits via conditional branch/fall-through but ends with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via conditional branch/fall-through but the branch "
               "isn't a terminator instruction!", MBB);
      }
    } else if (TBB && FBB) {
      // Block conditionally branches somewhere, otherwise branches
      // somewhere else.
      if (MBB->succ_size() == 1) {
        // A conditional branch with only one successor is weird, but allowed.
        if (FBB != TBB)
          report("MBB exits via conditional branch/branch but only has "
                 "one CFG successor!", MBB);
        else if (TBB != *MBB->succ_begin())
          report("MBB exits via conditional branch/branch but the CFG "
                 "successor doesn't match the actual successor!", MBB);
      } else if (MBB->succ_size() != 2) {
        report("MBB exits via conditional branch/branch but doesn't have "
               "exactly two CFG successors!", MBB);
      } else if (!matchPair(MBB->succ_begin(), TBB, FBB)) {
        report("MBB exits via conditional branch/branch but the CFG "
               "successors don't match the actual successors!", MBB);
      }
      if (MBB->empty()) {
        report("MBB exits via conditional branch/branch but doesn't "
               "contain any instructions!", MBB);
      } else if (!MBB->back().isBarrier()) {
        report("MBB exits via conditional branch/branch but doesn't end with a "
               "barrier instruction!", MBB);
      } else if (!MBB->back().isTerminator()) {
        report("MBB exits via conditional branch/branch but the branch "
               "isn't a terminator instruction!", MBB);
      }
      if (Cond.empty()) {
        report("MBB exits via conditional branch/branch but there's no "
               "condition!", MBB);
      }
    } else {
      report("AnalyzeBranch returned invalid data!", MBB);
    }
  }

  regsLive.clear();
  for (const auto &LI : MBB->liveins()) {
    if (!TargetRegisterInfo::isPhysicalRegister(LI.PhysReg)) {
      report("MBB live-in list contains non-physical register", MBB);
      continue;
    }
    for (MCSubRegIterator SubRegs(LI.PhysReg, TRI, /*IncludeSelf=*/true);
         SubRegs.isValid(); ++SubRegs)
      regsLive.insert(*SubRegs);
  }
  regsLiveInButUnused = regsLive;

  const MachineFrameInfo &MFI = MF->getFrameInfo();
  BitVector PR = MFI.getPristineRegs(*MF);
  for (int I = PR.find_first(); I > 0; I = PR.find_next(I)) {
    for (MCSubRegIterator SubRegs(I, TRI, /*IncludeSelf=*/true);
         SubRegs.isValid(); ++SubRegs)
      regsLive.insert(*SubRegs);
  }

  regsKilled.clear();
  regsDefined.clear();

  if (Indexes)
    lastIndex = Indexes->getMBBStartIdx(MBB);
}

// This function gets called for all bundle headers, including normal
// stand-alone unbundled instructions.
void MachineVerifier::visitMachineBundleBefore(const MachineInstr *MI) {
  if (Indexes && Indexes->hasIndex(*MI)) {
    SlotIndex idx = Indexes->getInstructionIndex(*MI);
    if (!(idx > lastIndex)) {
      report("Instruction index out of order", MI);
      errs() << "Last instruction was at " << lastIndex << '\n';
    }
    lastIndex = idx;
  }

  // Ensure non-terminators don't follow terminators.
  // Ignore predicated terminators formed by if conversion.
  // FIXME: If conversion shouldn't need to violate this rule.
  if (MI->isTerminator() && !TII->isPredicated(*MI)) {
    if (!FirstTerminator)
      FirstTerminator = MI;
  } else if (FirstTerminator) {
    report("Non-terminator instruction after the first terminator", MI);
    errs() << "First terminator was:\t" << *FirstTerminator;
  }
}

// The operands on an INLINEASM instruction must follow a template.
// Verify that the flag operands make sense.
void MachineVerifier::verifyInlineAsm(const MachineInstr *MI) {
  // The first two operands on INLINEASM are the asm string and global flags.
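  // After those come the operand groups: each group starts with an immediate
  // flag operand that gives the number of register operands in the group. An
  // optional metadata operand and trailing implicit register operands may
  // follow the groups. The checks below verify exactly this layout.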
  if (MI->getNumOperands() < 2) {
    report("Too few operands on inline asm", MI);
    return;
  }
  if (!MI->getOperand(0).isSymbol())
    report("Asm string must be an external symbol", MI);
  if (!MI->getOperand(1).isImm())
    report("Asm flags must be an immediate", MI);
  // Allowed flags are Extra_HasSideEffects = 1, Extra_IsAlignStack = 2,
  // Extra_AsmDialect = 4, Extra_MayLoad = 8, Extra_MayStore = 16,
  // and Extra_IsConvergent = 32.
  if (!isUInt<6>(MI->getOperand(1).getImm()))
    report("Unknown asm flags", &MI->getOperand(1), 1);

  static_assert(InlineAsm::MIOp_FirstOperand == 2, "Asm format changed");

  unsigned OpNo = InlineAsm::MIOp_FirstOperand;
  unsigned NumOps;
  for (unsigned e = MI->getNumOperands(); OpNo < e; OpNo += NumOps) {
    const MachineOperand &MO = MI->getOperand(OpNo);
    // There may be implicit ops after the fixed operands.
    if (!MO.isImm())
      break;
    NumOps = 1 + InlineAsm::getNumOperandRegisters(MO.getImm());
  }

  if (OpNo > MI->getNumOperands())
    report("Missing operands in last group", MI);

  // An optional MDNode follows the groups.
  if (OpNo < MI->getNumOperands() && MI->getOperand(OpNo).isMetadata())
    ++OpNo;

  // All trailing operands must be implicit registers.
  for (unsigned e = MI->getNumOperands(); OpNo < e; ++OpNo) {
    const MachineOperand &MO = MI->getOperand(OpNo);
    if (!MO.isReg() || !MO.isImplicit())
      report("Expected implicit register after groups", &MO, OpNo);
  }
}

void MachineVerifier::visitMachineInstrBefore(const MachineInstr *MI) {
  const MCInstrDesc &MCID = MI->getDesc();
  if (MI->getNumOperands() < MCID.getNumOperands()) {
    report("Too few operands", MI);
    errs() << MCID.getNumOperands() << " operands expected, but "
           << MI->getNumOperands() << " given.\n";
  }

  if (MI->isPHI() && MF->getProperties().hasProperty(
          MachineFunctionProperties::Property::NoPHIs))
    report("Found PHI instruction with NoPHIs property set", MI);

  // Check the operand layout of inline asm instructions.
  if (MI->isInlineAsm())
    verifyInlineAsm(MI);

  // Check the MachineMemOperands for basic consistency.
  for (MachineInstr::mmo_iterator I = MI->memoperands_begin(),
       E = MI->memoperands_end(); I != E; ++I) {
    if ((*I)->isLoad() && !MI->mayLoad())
      report("Missing mayLoad flag", MI);
    if ((*I)->isStore() && !MI->mayStore())
      report("Missing mayStore flag", MI);
  }

  // Debug values must not have a slot index.
  // Other instructions must have one, unless they are inside a bundle.
  if (LiveInts) {
    bool mapped = !LiveInts->isNotInMIMap(*MI);
    if (MI->isDebugValue()) {
      if (mapped)
        report("Debug instruction has a slot index", MI);
    } else if (MI->isInsideBundle()) {
      if (mapped)
        report("Instruction inside bundle has a slot index", MI);
    } else {
      if (!mapped)
        report("Missing slot index", MI);
    }
  }

  // Check types.
  if (isPreISelGenericOpcode(MCID.getOpcode())) {
    if (isFunctionSelected)
      report("Unexpected generic instruction in a Selected function", MI);

    // Generic instructions specify equality constraints between some
    // of their operands. Make sure these are consistent.
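    // (For example, the result and both sources of G_ADD share a single type
    // index, so they must all have the same LLT.)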
    SmallVector<LLT, 4> Types;
    for (unsigned i = 0; i < MCID.getNumOperands(); ++i) {
      if (!MCID.OpInfo[i].isGenericType())
        continue;
      size_t TypeIdx = MCID.OpInfo[i].getGenericTypeIndex();
      Types.resize(std::max(TypeIdx + 1, Types.size()));

      LLT OpTy = MRI->getType(MI->getOperand(i).getReg());
      if (Types[TypeIdx].isValid() && Types[TypeIdx] != OpTy)
        report("type mismatch in generic instruction", MI);
      Types[TypeIdx] = OpTy;
    }
  }

  // Generic opcodes must not have physical register operands.
  if (isPreISelGenericOpcode(MCID.getOpcode())) {
    for (auto &Op : MI->operands()) {
      if (Op.isReg() && TargetRegisterInfo::isPhysicalRegister(Op.getReg()))
        report("Generic instruction cannot have physical register", MI);
    }
  }

  StringRef ErrorInfo;
  if (!TII->verifyInstruction(*MI, ErrorInfo))
    report(ErrorInfo.data(), MI);
}

void
MachineVerifier::visitMachineOperand(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const MCInstrDesc &MCID = MI->getDesc();
  unsigned NumDefs = MCID.getNumDefs();
  if (MCID.getOpcode() == TargetOpcode::PATCHPOINT)
    NumDefs = (MONum == 0 && MO->isReg()) ? NumDefs : 0;

  // The first MCID.NumDefs operands must be explicit register defines
  if (MONum < NumDefs) {
    const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
    if (!MO->isReg())
      report("Explicit definition must be a register", MO, MONum);
    else if (!MO->isDef() && !MCOI.isOptionalDef())
      report("Explicit definition marked as use", MO, MONum);
    else if (MO->isImplicit())
      report("Explicit definition marked as implicit", MO, MONum);
  } else if (MONum < MCID.getNumOperands()) {
    const MCOperandInfo &MCOI = MCID.OpInfo[MONum];
    // Don't check if it's the last operand in a variadic instruction. See,
    // e.g., LDM_RET in the arm back end.
    if (MO->isReg() &&
        !(MI->isVariadic() && MONum == MCID.getNumOperands()-1)) {
      if (MO->isDef() && !MCOI.isOptionalDef())
        report("Explicit operand marked as def", MO, MONum);
      if (MO->isImplicit())
        report("Explicit operand marked as implicit", MO, MONum);
    }

    int TiedTo = MCID.getOperandConstraint(MONum, MCOI::TIED_TO);
    if (TiedTo != -1) {
      if (!MO->isReg())
        report("Tied use must be a register", MO, MONum);
      else if (!MO->isTied())
        report("Operand should be tied", MO, MONum);
      else if (unsigned(TiedTo) != MI->findTiedOperandIdx(MONum))
        report("Tied def doesn't match MCInstrDesc", MO, MONum);
    } else if (MO->isReg() && MO->isTied())
      report("Explicit operand should not be tied", MO, MONum);
  } else {
    // ARM adds %reg0 operands to indicate predicates. We'll allow that.
    if (MO->isReg() && !MO->isImplicit() && !MI->isVariadic() && MO->getReg())
      report("Extra explicit operand on non-variadic instruction", MO, MONum);
  }

  switch (MO->getType()) {
  case MachineOperand::MO_Register: {
    const unsigned Reg = MO->getReg();
    if (!Reg)
      return;
    if (MRI->tracksLiveness() && !MI->isDebugValue())
      checkLiveness(MO, MONum);

    // Verify the consistency of tied operands.
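    // A tied use must end up in the same register as the def it is tied to;
    // this is how two-address and read-modify-write constraints are modeled.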
    if (MO->isTied()) {
      unsigned OtherIdx = MI->findTiedOperandIdx(MONum);
      const MachineOperand &OtherMO = MI->getOperand(OtherIdx);
      if (!OtherMO.isReg())
        report("Must be tied to a register", MO, MONum);
      if (!OtherMO.isTied())
        report("Missing tie flags on tied operand", MO, MONum);
      if (MI->findTiedOperandIdx(OtherIdx) != MONum)
        report("Inconsistent tie links", MO, MONum);
      if (MONum < MCID.getNumDefs()) {
        if (OtherIdx < MCID.getNumOperands()) {
          if (-1 == MCID.getOperandConstraint(OtherIdx, MCOI::TIED_TO))
            report("Explicit def tied to explicit use without tie constraint",
                   MO, MONum);
        } else {
          if (!OtherMO.isImplicit())
            report("Explicit def should be tied to implicit use", MO, MONum);
        }
      }
    }

    // Verify two-address constraints after leaving SSA form.
    unsigned DefIdx;
    if (!MRI->isSSA() && MO->isUse() &&
        MI->isRegTiedToDefOperand(MONum, &DefIdx) &&
        Reg != MI->getOperand(DefIdx).getReg())
      report("Two-address instruction operands must be identical", MO, MONum);

    // Check register classes.
    if (MONum < MCID.getNumOperands() && !MO->isImplicit()) {
      unsigned SubIdx = MO->getSubReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (SubIdx) {
          report("Illegal subregister index for physical register", MO, MONum);
          return;
        }
        if (const TargetRegisterClass *DRC =
              TII->getRegClass(MCID, MONum, TRI, *MF)) {
          if (!DRC->contains(Reg)) {
            report("Illegal physical register for instruction", MO, MONum);
            errs() << TRI->getName(Reg) << " is not a "
                   << TRI->getRegClassName(DRC) << " register.\n";
          }
        }
      } else {
        // Virtual register.
        const TargetRegisterClass *RC = MRI->getRegClassOrNull(Reg);
        if (!RC) {
          // This is a generic virtual register.

          // If we're post-Select, we can't have gvregs anymore.
          if (isFunctionSelected) {
            report("Generic virtual register invalid in a Selected function",
                   MO, MONum);
            return;
          }

          // The gvreg must have a type and it must not have a SubIdx.
          LLT Ty = MRI->getType(Reg);
          if (!Ty.isValid()) {
            report("Generic virtual register must have a valid type", MO,
                   MONum);
            return;
          }

          const RegisterBank *RegBank = MRI->getRegBankOrNull(Reg);

          // If we're post-RegBankSelect, the gvreg must have a bank.
          if (!RegBank && isFunctionRegBankSelected) {
            report("Generic virtual register must have a bank in a "
                   "RegBankSelected function",
                   MO, MONum);
            return;
          }

          // Make sure the register fits into its register bank if any.
          if (RegBank && Ty.isValid() &&
              RegBank->getSize() < Ty.getSizeInBits()) {
            report("Register bank is too small for virtual register", MO,
                   MONum);
            errs() << "Register bank " << RegBank->getName() << " too small ("
                   << RegBank->getSize() << ") to fit " << Ty.getSizeInBits()
                   << " bits\n";
            return;
          }
          if (SubIdx) {
            report("Generic virtual register does not allow subregister index",
                   MO, MONum);
            return;
          }
          break;
        }
        if (SubIdx) {
          const TargetRegisterClass *SRC =
            TRI->getSubClassWithSubReg(RC, SubIdx);
          if (!SRC) {
            report("Invalid subregister index for virtual register", MO, MONum);
            errs() << "Register class " << TRI->getRegClassName(RC)
                   << " does not support subreg index " << SubIdx << "\n";
            return;
          }
          if (RC != SRC) {
            report("Invalid register class for subregister index", MO, MONum);
            errs() << "Register class " << TRI->getRegClassName(RC)
                   << " does not fully support subreg index " << SubIdx << "\n";
            return;
          }
        }
        if (const TargetRegisterClass *DRC =
              TII->getRegClass(MCID, MONum, TRI, *MF)) {
          if (SubIdx) {
            const TargetRegisterClass *SuperRC =
              TRI->getLargestLegalSuperClass(RC, *MF);
            if (!SuperRC) {
              report("No largest legal super class exists.", MO, MONum);
              return;
            }
            DRC = TRI->getMatchingSuperRegClass(SuperRC, DRC, SubIdx);
            if (!DRC) {
              report("No matching super-reg register class.", MO, MONum);
              return;
            }
          }
          if (!RC->hasSuperClassEq(DRC)) {
            report("Illegal virtual register for instruction", MO, MONum);
            errs() << "Expected a " << TRI->getRegClassName(DRC)
                   << " register, but got a " << TRI->getRegClassName(RC)
                   << " register\n";
          }
        }
      }
    }
    break;
  }

  case MachineOperand::MO_RegisterMask:
    regMasks.push_back(MO->getRegMask());
    break;

  case MachineOperand::MO_MachineBasicBlock:
    if (MI->isPHI() && !MO->getMBB()->isSuccessor(MI->getParent()))
      report("PHI operand is not in the CFG", MO, MONum);
    break;

  case MachineOperand::MO_FrameIndex:
    if (LiveStks && LiveStks->hasInterval(MO->getIndex()) &&
        LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      int FI = MO->getIndex();
      LiveInterval &LI = LiveStks->getInterval(FI);
      SlotIndex Idx = LiveInts->getInstructionIndex(*MI);

      bool stores = MI->mayStore();
      bool loads = MI->mayLoad();
      // For a memory-to-memory move, we need to check if the frame
      // index is used for storing or loading, by inspecting the
      // memory operands.
      if (stores && loads) {
        for (auto *MMO : MI->memoperands()) {
          const PseudoSourceValue *PSV = MMO->getPseudoValue();
          if (PSV == nullptr) continue;
          const FixedStackPseudoSourceValue *Value =
            dyn_cast<FixedStackPseudoSourceValue>(PSV);
          if (Value == nullptr) continue;
          if (Value->getFrameIndex() != FI) continue;

          if (MMO->isStore())
            loads = false;
          else
            stores = false;
          break;
        }
        if (loads == stores)
          report("Missing fixed stack memoperand.", MI);
      }
      if (loads && !LI.liveAt(Idx.getRegSlot(true))) {
        report("Instruction loads from dead spill slot", MO, MONum);
        errs() << "Live stack: " << LI << '\n';
      }
      if (stores && !LI.liveAt(Idx.getRegSlot())) {
        report("Instruction stores to dead spill slot", MO, MONum);
        errs() << "Live stack: " << LI << '\n';
      }
    }
    break;

  default:
    break;
  }
}

void MachineVerifier::checkLivenessAtUse(const MachineOperand *MO,
    unsigned MONum, SlotIndex UseIdx, const LiveRange &LR, unsigned VRegOrUnit,
    LaneBitmask LaneMask) {
  LiveQueryResult LRQ = LR.Query(UseIdx);
  // Check if we have a segment at the use; note, however, that we only need
  // one live subregister range, the others may be dead.
  if (!LRQ.valueIn() && LaneMask == 0) {
    report("No live segment at use", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    report_context(UseIdx);
  }
  if (MO->isKill() && !LRQ.isKill()) {
    report("Live range continues after kill flag", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    if (LaneMask != 0)
      report_context_lanemask(LaneMask);
    report_context(UseIdx);
  }
}

void MachineVerifier::checkLivenessAtDef(const MachineOperand *MO,
    unsigned MONum, SlotIndex DefIdx, const LiveRange &LR, unsigned VRegOrUnit,
    LaneBitmask LaneMask) {
  if (const VNInfo *VNI = LR.getVNInfoAt(DefIdx)) {
    assert(VNI && "NULL valno is not allowed");
    if (VNI->def != DefIdx) {
      report("Inconsistent valno->def", MO, MONum);
      report_context_liverange(LR);
      report_context_vreg_regunit(VRegOrUnit);
      if (LaneMask != 0)
        report_context_lanemask(LaneMask);
      report_context(*VNI);
      report_context(DefIdx);
    }
  } else {
    report("No live segment at def", MO, MONum);
    report_context_liverange(LR);
    report_context_vreg_regunit(VRegOrUnit);
    if (LaneMask != 0)
      report_context_lanemask(LaneMask);
    report_context(DefIdx);
  }
  // Check that, if the dead def flag is present, LiveInts agrees.
  if (MO->isDead()) {
    LiveQueryResult LRQ = LR.Query(DefIdx);
    if (!LRQ.isDeadDef()) {
      // In case of physregs we can have a non-dead definition on another
      // operand.
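      // For example, one operand may carry a dead def of a sub-register while
      // another operand defines an overlapping super-register, keeping the
      // shared register units alive.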
      bool otherDef = false;
      if (!TargetRegisterInfo::isVirtualRegister(VRegOrUnit)) {
        const MachineInstr &MI = *MO->getParent();
        for (const MachineOperand &MO : MI.operands()) {
          if (!MO.isReg() || !MO.isDef() || MO.isDead())
            continue;
          unsigned Reg = MO.getReg();
          for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
            if (*Units == VRegOrUnit) {
              otherDef = true;
              break;
            }
          }
        }
      }

      if (!otherDef) {
        report("Live range continues after dead def flag", MO, MONum);
        report_context_liverange(LR);
        report_context_vreg_regunit(VRegOrUnit);
        if (LaneMask != 0)
          report_context_lanemask(LaneMask);
      }
    }
  }
}

void MachineVerifier::checkLiveness(const MachineOperand *MO, unsigned MONum) {
  const MachineInstr *MI = MO->getParent();
  const unsigned Reg = MO->getReg();

  // Both use and def operands can read a register.
  if (MO->readsReg()) {
    regsLiveInButUnused.erase(Reg);

    if (MO->isKill())
      addRegWithSubRegs(regsKilled, Reg);

    // Check that LiveVars knows this kill.
    if (LiveVars && TargetRegisterInfo::isVirtualRegister(Reg) &&
        MO->isKill()) {
      LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
      if (!is_contained(VI.Kills, MI))
        report("Kill missing from LiveVariables", MO, MONum);
    }

    // Check LiveInts liveness and kill.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex UseIdx = LiveInts->getInstructionIndex(*MI);
      // Check the cached regunit intervals.
      if (TargetRegisterInfo::isPhysicalRegister(Reg) && !isReserved(Reg)) {
        for (MCRegUnitIterator Units(Reg, TRI); Units.isValid(); ++Units) {
          if (const LiveRange *LR = LiveInts->getCachedRegUnit(*Units))
            checkLivenessAtUse(MO, MONum, UseIdx, *LR, *Units);
        }
      }

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          // This is a virtual register interval.
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtUse(MO, MONum, UseIdx, LI, Reg);

          if (LI.hasSubRanges() && !MO->isDef()) {
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
              ? TRI->getSubRegIndexLaneMask(SubRegIdx)
              : MRI->getMaxLaneMaskForVReg(Reg);
            LaneBitmask LiveInMask = 0;
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((MOMask & SR.LaneMask) == 0)
                continue;
              checkLivenessAtUse(MO, MONum, UseIdx, SR, Reg, SR.LaneMask);
              LiveQueryResult LRQ = SR.Query(UseIdx);
              if (LRQ.valueIn())
                LiveInMask |= SR.LaneMask;
            }
            // At least parts of the register have to be live at the use.
            if ((LiveInMask & MOMask) == 0) {
              report("No live subrange at use", MO, MONum);
              report_context(LI);
              report_context(UseIdx);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }

    // Use of a dead register.
    if (!regsLive.count(Reg)) {
      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        // Reserved registers may be used even when 'dead'.
        bool Bad = !isReserved(Reg);
        // We are fine if just any subregister has a defined value.
        if (Bad) {
          for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid();
               ++SubRegs) {
            if (regsLive.count(*SubRegs)) {
              Bad = false;
              break;
            }
          }
        }
        // If there is an additional implicit-use of a super register we stop
        // here. By definition we are fine if the super register is not
        // (completely) dead; if the complete super register is dead, we will
        // get a report for its operand.
        if (Bad) {
          for (const MachineOperand &MOP : MI->uses()) {
            if (!MOP.isReg())
              continue;
            if (!MOP.isImplicit())
              continue;
            for (MCSubRegIterator SubRegs(MOP.getReg(), TRI); SubRegs.isValid();
                 ++SubRegs) {
              if (*SubRegs == Reg) {
                Bad = false;
                break;
              }
            }
          }
        }
        if (Bad)
          report("Using an undefined physical register", MO, MONum);
      } else if (MRI->def_empty(Reg)) {
        report("Reading virtual register without a def", MO, MONum);
      } else {
        BBInfo &MInfo = MBBInfoMap[MI->getParent()];
        // We don't know which virtual registers are live in, so only complain
        // if vreg was killed in this MBB. Otherwise keep track of vregs that
        // must be live in. PHI instructions are handled separately.
        if (MInfo.regsKilled.count(Reg))
          report("Using a killed virtual register", MO, MONum);
        else if (!MI->isPHI())
          MInfo.vregsLiveIn.insert(std::make_pair(Reg, MI));
      }
    }
  }

  if (MO->isDef()) {
    // Register defined.
    // TODO: verify that earlyclobber ops are not used.
    if (MO->isDead())
      addRegWithSubRegs(regsDead, Reg);
    else
      addRegWithSubRegs(regsDefined, Reg);

    // Verify SSA form.
    if (MRI->isSSA() && TargetRegisterInfo::isVirtualRegister(Reg) &&
        std::next(MRI->def_begin(Reg)) != MRI->def_end())
      report("Multiple virtual register defs in SSA form", MO, MONum);

    // Check LiveInts for a live segment, but only for virtual registers.
    if (LiveInts && !LiveInts->isNotInMIMap(*MI)) {
      SlotIndex DefIdx = LiveInts->getInstructionIndex(*MI);
      DefIdx = DefIdx.getRegSlot(MO->isEarlyClobber());

      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (LiveInts->hasInterval(Reg)) {
          const LiveInterval &LI = LiveInts->getInterval(Reg);
          checkLivenessAtDef(MO, MONum, DefIdx, LI, Reg);

          if (LI.hasSubRanges()) {
            unsigned SubRegIdx = MO->getSubReg();
            LaneBitmask MOMask = SubRegIdx != 0
              ? TRI->getSubRegIndexLaneMask(SubRegIdx)
              : MRI->getMaxLaneMaskForVReg(Reg);
            for (const LiveInterval::SubRange &SR : LI.subranges()) {
              if ((SR.LaneMask & MOMask) == 0)
                continue;
              checkLivenessAtDef(MO, MONum, DefIdx, SR, Reg, SR.LaneMask);
            }
          }
        } else {
          report("Virtual register has no live interval", MO, MONum);
        }
      }
    }
  }
}

void MachineVerifier::visitMachineInstrAfter(const MachineInstr *MI) {
}

// This function gets called after visiting all instructions in a bundle. The
// argument points to the bundle header.
// Normal stand-alone instructions are also considered 'bundles', and this
// function is called for all of them.
void MachineVerifier::visitMachineBundleAfter(const MachineInstr *MI) {
  BBInfo &MInfo = MBBInfoMap[MI->getParent()];
  set_union(MInfo.regsKilled, regsKilled);
  set_subtract(regsLive, regsKilled); regsKilled.clear();
  // Kill any masked registers.
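  // A register mask operand, typically found on calls, clobbers every
  // physical register that is not marked as preserved in the mask.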
  while (!regMasks.empty()) {
    const uint32_t *Mask = regMasks.pop_back_val();
    for (RegSet::iterator I = regsLive.begin(), E = regsLive.end(); I != E; ++I)
      if (TargetRegisterInfo::isPhysicalRegister(*I) &&
          MachineOperand::clobbersPhysReg(Mask, *I))
        regsDead.push_back(*I);
  }
  set_subtract(regsLive, regsDead); regsDead.clear();
  set_union(regsLive, regsDefined); regsDefined.clear();
}

void
MachineVerifier::visitMachineBasicBlockAfter(const MachineBasicBlock *MBB) {
  MBBInfoMap[MBB].regsLiveOut = regsLive;
  regsLive.clear();

  if (Indexes) {
    SlotIndex stop = Indexes->getMBBEndIdx(MBB);
    if (!(stop > lastIndex)) {
      report("Block ends before last instruction index", MBB);
      errs() << "Block ends at " << stop
             << " last instruction was at " << lastIndex << '\n';
    }
    lastIndex = stop;
  }
}

// Calculate the largest possible vregsPassed sets. These are the registers that
// can pass through an MBB live, but may not be live every time. It is assumed
// that all vregsPassed sets are empty before the call.
void MachineVerifier::calcRegsPassed() {
  // First push live-out regs to successors' vregsPassed. Remember the MBBs that
  // have any vregsPassed.
  SmallPtrSet<const MachineBasicBlock*, 8> todo;
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    if (!MInfo.reachable)
      continue;
    for (MachineBasicBlock::const_succ_iterator SuI = MBB.succ_begin(),
         SuE = MBB.succ_end(); SuI != SuE; ++SuI) {
      BBInfo &SInfo = MBBInfoMap[*SuI];
      if (SInfo.addPassed(MInfo.regsLiveOut))
        todo.insert(*SuI);
    }
  }

  // Iteratively push vregsPassed to successors. This will converge to the same
  // final state regardless of DenseSet iteration order.
  while (!todo.empty()) {
    const MachineBasicBlock *MBB = *todo.begin();
    todo.erase(MBB);
    BBInfo &MInfo = MBBInfoMap[MBB];
    for (MachineBasicBlock::const_succ_iterator SuI = MBB->succ_begin(),
         SuE = MBB->succ_end(); SuI != SuE; ++SuI) {
      if (*SuI == MBB)
        continue;
      BBInfo &SInfo = MBBInfoMap[*SuI];
      if (SInfo.addPassed(MInfo.vregsPassed))
        todo.insert(*SuI);
    }
  }
}

// Calculate the set of virtual registers that must be passed through each
// basic block in order to satisfy the requirements of successor blocks. This
// is very similar to calcRegsPassed, only backwards.
void MachineVerifier::calcRegsRequired() {
  // First push live-in regs to predecessors' vregsRequired.
  SmallPtrSet<const MachineBasicBlock*, 8> todo;
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    for (MachineBasicBlock::const_pred_iterator PrI = MBB.pred_begin(),
         PrE = MBB.pred_end(); PrI != PrE; ++PrI) {
      BBInfo &PInfo = MBBInfoMap[*PrI];
      if (PInfo.addRequired(MInfo.vregsLiveIn))
        todo.insert(*PrI);
    }
  }

  // Iteratively push vregsRequired to predecessors. This will converge to the
  // same final state regardless of DenseSet iteration order.
  while (!todo.empty()) {
    const MachineBasicBlock *MBB = *todo.begin();
    todo.erase(MBB);
    BBInfo &MInfo = MBBInfoMap[MBB];
    for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
         PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
      if (*PrI == MBB)
        continue;
      BBInfo &SInfo = MBBInfoMap[*PrI];
      if (SInfo.addRequired(MInfo.vregsRequired))
        todo.insert(*PrI);
    }
  }
}

// Check PHI instructions at the beginning of MBB. It is assumed that
// calcRegsPassed has been run so BBInfo::isLiveOut is valid.
void MachineVerifier::checkPHIOps(const MachineBasicBlock *MBB) {
  SmallPtrSet<const MachineBasicBlock*, 8> seen;
  for (const auto &BBI : *MBB) {
    if (!BBI.isPHI())
      break;
    seen.clear();

    for (unsigned i = 1, e = BBI.getNumOperands(); i != e; i += 2) {
      unsigned Reg = BBI.getOperand(i).getReg();
      const MachineBasicBlock *Pre = BBI.getOperand(i + 1).getMBB();
      if (!Pre->isSuccessor(MBB))
        continue;
      seen.insert(Pre);
      BBInfo &PrInfo = MBBInfoMap[Pre];
      if (PrInfo.reachable && !PrInfo.isLiveOut(Reg))
        report("PHI operand is not live-out from predecessor",
               &BBI.getOperand(i), i);
    }

    // Did we see all predecessors?
    for (MachineBasicBlock::const_pred_iterator PrI = MBB->pred_begin(),
         PrE = MBB->pred_end(); PrI != PrE; ++PrI) {
      if (!seen.count(*PrI)) {
        report("Missing PHI operand", &BBI);
        errs() << "BB#" << (*PrI)->getNumber()
               << " is a predecessor according to the CFG.\n";
      }
    }
  }
}

void MachineVerifier::visitMachineFunctionAfter() {
  calcRegsPassed();

  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];

    // Skip unreachable MBBs.
    if (!MInfo.reachable)
      continue;

    checkPHIOps(&MBB);
  }

  // Now check liveness info if available
  calcRegsRequired();

  // Check for killed virtual registers that should be live out.
  for (const auto &MBB : *MF) {
    BBInfo &MInfo = MBBInfoMap[&MBB];
    for (RegSet::iterator
           I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end();
         I != E; ++I)
      if (MInfo.regsKilled.count(*I)) {
        report("Virtual register killed in block, but needed live out.", &MBB);
        errs() << "Virtual register " << PrintReg(*I)
               << " is used after the block.\n";
      }
  }

  if (!MF->empty()) {
    BBInfo &MInfo = MBBInfoMap[&MF->front()];
    for (RegSet::iterator
           I = MInfo.vregsRequired.begin(), E = MInfo.vregsRequired.end();
         I != E; ++I) {
      report("Virtual register defs don't dominate all uses.", MF);
      report_context_vreg(*I);
    }
  }

  if (LiveVars)
    verifyLiveVariables();
  if (LiveInts)
    verifyLiveIntervals();
}

void MachineVerifier::verifyLiveVariables() {
  assert(LiveVars && "Don't call verifyLiveVariables without LiveVars");
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    LiveVariables::VarInfo &VI = LiveVars->getVarInfo(Reg);
    for (const auto &MBB : *MF) {
      BBInfo &MInfo = MBBInfoMap[&MBB];

      // Our vregsRequired should be identical to LiveVariables' AliveBlocks
      if (MInfo.vregsRequired.count(Reg)) {
        if (!VI.AliveBlocks.test(MBB.getNumber())) {
          report("LiveVariables: Block missing from AliveBlocks", &MBB);
          errs() << "Virtual register " << PrintReg(Reg)
                 << " must be live through the block.\n";
        }
      } else {
        if (VI.AliveBlocks.test(MBB.getNumber())) {
          report("LiveVariables: Block should not be in AliveBlocks", &MBB);
          errs() << "Virtual register " << PrintReg(Reg)
                 << " is not needed live through the block.\n";
        }
      }
    }
  }
}

void MachineVerifier::verifyLiveIntervals() {
  assert(LiveInts && "Don't call verifyLiveIntervals without LiveInts");
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);

    // Spilling and splitting may leave unused registers around. Skip them.
    if (MRI->reg_nodbg_empty(Reg))
      continue;

    if (!LiveInts->hasInterval(Reg)) {
      report("Missing live interval for virtual register", MF);
      errs() << PrintReg(Reg, TRI) << " still has defs or uses\n";
      continue;
    }

    const LiveInterval &LI = LiveInts->getInterval(Reg);
    assert(Reg == LI.reg && "Invalid reg to interval mapping");
    verifyLiveInterval(LI);
  }

  // Verify all the cached regunit intervals.
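  // Register units are the atomic units of physical-register liveness;
  // aliasing physical registers share register units.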
  for (unsigned i = 0, e = TRI->getNumRegUnits(); i != e; ++i)
    if (const LiveRange *LR = LiveInts->getCachedRegUnit(i))
      verifyLiveRange(*LR, i);
}

void MachineVerifier::verifyLiveRangeValue(const LiveRange &LR,
                                           const VNInfo *VNI, unsigned Reg,
                                           LaneBitmask LaneMask) {
  if (VNI->isUnused())
    return;

  const VNInfo *DefVNI = LR.getVNInfoAt(VNI->def);

  if (!DefVNI) {
    report("Value not live at VNInfo def and not marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (DefVNI != VNI) {
    report("Live segment at def has different VNInfo", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(VNI->def);
  if (!MBB) {
    report("Invalid VNInfo definition index", MF);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (VNI->isPHIDef()) {
    if (VNI->def != LiveInts->getMBBStartIdx(MBB)) {
      report("PHIDef VNInfo is not defined at MBB start", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
    return;
  }

  // Non-PHI def.
  const MachineInstr *MI = LiveInts->getInstructionFromIndex(VNI->def);
  if (!MI) {
    report("No instruction at VNInfo def index", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(*VNI);
    return;
  }

  if (Reg != 0) {
    bool hasDef = false;
    bool isEarlyClobber = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || !MOI->isDef())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        if (MOI->getReg() != Reg)
          continue;
      } else {
        if (!TargetRegisterInfo::isPhysicalRegister(MOI->getReg()) ||
            !TRI->hasRegUnit(MOI->getReg(), Reg))
          continue;
      }
      if (LaneMask != 0 &&
          (TRI->getSubRegIndexLaneMask(MOI->getSubReg()) & LaneMask) == 0)
        continue;
      hasDef = true;
      if (MOI->isEarlyClobber())
        isEarlyClobber = true;
    }

    if (!hasDef) {
      report("Defining instruction does not modify register", MI);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }

    // Early clobber defs begin at USE slots, but other defs must begin at
    // DEF slots.
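    // In other words, the slot kind encoded in VNI->def must match the kind
    // of def operand found on the instruction above.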
    if (isEarlyClobber) {
      if (!VNI->def.isEarlyClobber()) {
        report("Early clobber def must be at an early-clobber slot", MBB);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
      }
    } else if (!VNI->def.isRegister()) {
      report("Non-PHI, non-early clobber def must be at a register slot", MBB);
      report_context(LR, Reg, LaneMask);
      report_context(*VNI);
    }
  }
}

void MachineVerifier::verifyLiveRangeSegment(const LiveRange &LR,
                                             const LiveRange::const_iterator I,
                                             unsigned Reg, LaneBitmask LaneMask)
{
  const LiveRange::Segment &S = *I;
  const VNInfo *VNI = S.valno;
  assert(VNI && "Live segment has no valno");

  if (VNI->id >= LR.getNumValNums() || VNI != LR.getValNumInfo(VNI->id)) {
    report("Foreign valno in live segment", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    report_context(*VNI);
  }

  if (VNI->isUnused()) {
    report("Live segment valno is marked unused", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *MBB = LiveInts->getMBBFromIndex(S.start);
  if (!MBB) {
    report("Bad start of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }
  SlotIndex MBBStartIdx = LiveInts->getMBBStartIdx(MBB);
  if (S.start != MBBStartIdx && S.start != VNI->def) {
    report("Live segment must begin at MBB entry or valno def", MBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  const MachineBasicBlock *EndMBB =
    LiveInts->getMBBFromIndex(S.end.getPrevSlot());
  if (!EndMBB) {
    report("Bad end of live segment, no basic block", MF);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // No more checks for live-out segments.
  if (S.end == LiveInts->getMBBEndIdx(EndMBB))
    return;

  // RegUnit intervals are allowed dead phis.
  if (!TargetRegisterInfo::isVirtualRegister(Reg) && VNI->isPHIDef() &&
      S.start == VNI->def && S.end == VNI->def.getDeadSlot())
    return;

  // The live segment is ending inside EndMBB.
  const MachineInstr *MI =
    LiveInts->getInstructionFromIndex(S.end.getPrevSlot());
  if (!MI) {
    report("Live segment doesn't end at a valid instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
    return;
  }

  // The block slot must refer to a basic block boundary.
  if (S.end.isBlock()) {
    report("Live segment ends at B slot of an instruction", EndMBB);
    report_context(LR, Reg, LaneMask);
    report_context(S);
  }

  if (S.end.isDead()) {
    // Segment ends on the dead slot.
    // That means there must be a dead def.
    if (!SlotIndex::isSameInstr(S.start, S.end)) {
      report("Live segment ending at dead slot spans instructions", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // A live segment can only end at an early-clobber slot if it is being
  // redefined by an early-clobber def.
  if (S.end.isEarlyClobber()) {
    if (I+1 == LR.end() || (I+1)->start != S.end) {
      report("Live segment ending at early clobber slot must be "
             "redefined by an EC def in the same instruction", EndMBB);
      report_context(LR, Reg, LaneMask);
      report_context(S);
    }
  }

  // The following checks only apply to virtual registers. Physreg liveness
  // is too weird to check.
  if (TargetRegisterInfo::isVirtualRegister(Reg)) {
    // A live segment can end with either a redefinition, a kill flag on a
    // use, or a dead flag on a def.
    bool hasRead = false;
    bool hasSubRegDef = false;
    bool hasDeadDef = false;
    for (ConstMIBundleOperands MOI(*MI); MOI.isValid(); ++MOI) {
      if (!MOI->isReg() || MOI->getReg() != Reg)
        continue;
      unsigned Sub = MOI->getSubReg();
      LaneBitmask SLM = Sub != 0 ? TRI->getSubRegIndexLaneMask(Sub) : ~0U;
      if (MOI->isDef()) {
        if (Sub != 0) {
          hasSubRegDef = true;
          // An operand vreg0:sub0<def> reads vreg0:sub1..n. Invert the lane
          // mask for subregister defs. Read-undef defs will be handled by
          // readsReg below.
          SLM = ~SLM;
        }
        if (MOI->isDead())
          hasDeadDef = true;
      }
      if (LaneMask != 0 && !(LaneMask & SLM))
        continue;
      if (MOI->readsReg())
        hasRead = true;
    }
    if (S.end.isDead()) {
      // Make sure that the corresponding machine operand for a "dead" live
      // range has the dead flag. We cannot perform this check for subregister
      // live ranges as partially dead values are allowed.
      if (LaneMask == 0 && !hasDeadDef) {
        report("Instruction ending live segment on dead slot has no dead flag",
               MI);
        report_context(LR, Reg, LaneMask);
        report_context(S);
      }
    } else {
      if (!hasRead) {
        // When tracking subregister liveness, the main range must start new
        // values on partial register writes, even if there is no read.
        if (!MRI->shouldTrackSubRegLiveness(Reg) || LaneMask != 0 ||
            !hasSubRegDef) {
          report("Instruction ending live segment doesn't read the register",
                 MI);
          report_context(LR, Reg, LaneMask);
          report_context(S);
        }
      }
    }
  }

  // Now check all the basic blocks in this live segment.
  MachineFunction::const_iterator MFI = MBB->getIterator();
  // Is this live segment the beginning of a non-PHIDef VN?
  if (S.start == VNI->def && !VNI->isPHIDef()) {
    // Not live-in to any blocks.
    if (MBB == EndMBB)
      return;
    // Skip this block.
    ++MFI;
  }
  for (;;) {
    assert(LiveInts->isLiveInToMBB(LR, &*MFI));
    // We don't know how to track physregs into a landing pad.
    if (!TargetRegisterInfo::isVirtualRegister(Reg) &&
        MFI->isEHPad()) {
      if (&*MFI == EndMBB)
        break;
      ++MFI;
      continue;
    }

    // Is VNI a PHI-def in the current block?
    bool IsPHI = VNI->isPHIDef() &&
      VNI->def == LiveInts->getMBBStartIdx(&*MFI);

    // Check that VNI is live-out of all predecessors.
    for (MachineBasicBlock::const_pred_iterator PI = MFI->pred_begin(),
         PE = MFI->pred_end(); PI != PE; ++PI) {
      SlotIndex PEnd = LiveInts->getMBBEndIdx(*PI);
      const VNInfo *PVNI = LR.getVNInfoBefore(PEnd);

      // All predecessors must have a live-out value if this is not a
      // subregister live range.
      if (!PVNI && LaneMask == 0) {
        report("Register not marked live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        report_context(*VNI);
        errs() << " live into BB#" << MFI->getNumber()
               << '@' << LiveInts->getMBBStartIdx(&*MFI)
               << ", not live before " << PEnd << '\n';
        continue;
      }

      // Only PHI-defs can take different predecessor values.
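      // A value that is live across the block boundary without a PHI-def must
      // be exactly the same value number in every predecessor.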
      if (!IsPHI && PVNI != VNI) {
        report("Different value live out of predecessor", *PI);
        report_context(LR, Reg, LaneMask);
        errs() << "Valno #" << PVNI->id << " live out of BB#"
               << (*PI)->getNumber() << '@' << PEnd << "\nValno #" << VNI->id
               << " live into BB#" << MFI->getNumber() << '@'
               << LiveInts->getMBBStartIdx(&*MFI) << '\n';
      }
    }
    if (&*MFI == EndMBB)
      break;
    ++MFI;
  }
}

void MachineVerifier::verifyLiveRange(const LiveRange &LR, unsigned Reg,
                                      LaneBitmask LaneMask) {
  for (const VNInfo *VNI : LR.valnos)
    verifyLiveRangeValue(LR, VNI, Reg, LaneMask);

  for (LiveRange::const_iterator I = LR.begin(), E = LR.end(); I != E; ++I)
    verifyLiveRangeSegment(LR, I, Reg, LaneMask);
}

void MachineVerifier::verifyLiveInterval(const LiveInterval &LI) {
  unsigned Reg = LI.reg;
  assert(TargetRegisterInfo::isVirtualRegister(Reg));
  verifyLiveRange(LI, Reg);

  LaneBitmask Mask = 0;
  LaneBitmask MaxMask = MRI->getMaxLaneMaskForVReg(Reg);
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((Mask & SR.LaneMask) != 0) {
      report("Lane masks of sub ranges overlap in live interval", MF);
      report_context(LI);
    }
    if ((SR.LaneMask & ~MaxMask) != 0) {
      report("Subrange lanemask is invalid", MF);
      report_context(LI);
    }
    if (SR.empty()) {
      report("Subrange must not be empty", MF);
      report_context(SR, LI.reg, SR.LaneMask);
    }
    Mask |= SR.LaneMask;
    verifyLiveRange(SR, LI.reg, SR.LaneMask);
    if (!LI.covers(SR)) {
      report("A Subrange is not covered by the main range", MF);
      report_context(LI);
    }
  }

  // Check that the LI has only one connected component.
  ConnectedVNInfoEqClasses ConEQ(*LiveInts);
  unsigned NumComp = ConEQ.Classify(LI);
  if (NumComp > 1) {
    report("Multiple connected components in live interval", MF);
    report_context(LI);
    for (unsigned comp = 0; comp != NumComp; ++comp) {
      errs() << comp << ": valnos";
      for (LiveInterval::const_vni_iterator I = LI.vni_begin(),
           E = LI.vni_end(); I != E; ++I)
        if (comp == ConEQ.getEqClass(*I))
          errs() << ' ' << (*I)->id;
      errs() << '\n';
    }
  }
}

namespace {
// FrameSetup and FrameDestroy can have zero adjustment, so a running integer
// alone cannot tell whether a FrameSetup is still pending when the value is
// zero. We use a bool plus an integer to capture the stack state.
struct StackStateOfBB {
  StackStateOfBB() : EntryValue(0), ExitValue(0), EntryIsSetup(false),
    ExitIsSetup(false) { }
  StackStateOfBB(int EntryVal, int ExitVal, bool EntrySetup, bool ExitSetup) :
    EntryValue(EntryVal), ExitValue(ExitVal), EntryIsSetup(EntrySetup),
    ExitIsSetup(ExitSetup) { }
  // Can be negative, which means we are setting up a frame.
  int EntryValue;
  int ExitValue;
  bool EntryIsSetup;
  bool ExitIsSetup;
};
}

/// Make sure that, on every path through the CFG, a FrameSetup <n> is always
/// followed by a FrameDestroy <n>, stack adjustments are identical on all
/// CFG edges to a merge point, and the frame is destroyed at the end of each
/// return block.
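///
/// For example, a call sequence lowered as
///   ADJCALLSTACKDOWN 16   (FrameSetup:   ExitValue -= 16, ExitIsSetup = true)
///   ... the call ...
///   ADJCALLSTACKUP 16     (FrameDestroy: ExitValue += 16, ExitIsSetup = false)
/// leaves ExitValue equal to EntryValue, which is what every return block must
/// end with. (ADJCALLSTACKDOWN/UP are the pseudo opcodes most targets use; the
/// verifier only identifies them via TII->getCallFrameSetupOpcode() and
/// TII->getCallFrameDestroyOpcode().)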
void MachineVerifier::verifyStackFrame() {
  unsigned FrameSetupOpcode = TII->getCallFrameSetupOpcode();
  unsigned FrameDestroyOpcode = TII->getCallFrameDestroyOpcode();

  SmallVector<StackStateOfBB, 8> SPState;
  SPState.resize(MF->getNumBlockIDs());
  df_iterator_default_set<const MachineBasicBlock*> Reachable;

  // Visit the MBBs in DFS order.
  for (df_ext_iterator<const MachineFunction*,
                       df_iterator_default_set<const MachineBasicBlock*> >
       DFI = df_ext_begin(MF, Reachable), DFE = df_ext_end(MF, Reachable);
       DFI != DFE; ++DFI) {
    const MachineBasicBlock *MBB = *DFI;

    StackStateOfBB BBState;
    // Check the exit state of the DFS stack predecessor.
    if (DFI.getPathLength() >= 2) {
      const MachineBasicBlock *StackPred = DFI.getPath(DFI.getPathLength() - 2);
      assert(Reachable.count(StackPred) &&
             "DFS stack predecessor is already visited.\n");
      BBState.EntryValue = SPState[StackPred->getNumber()].ExitValue;
      BBState.EntryIsSetup = SPState[StackPred->getNumber()].ExitIsSetup;
      BBState.ExitValue = BBState.EntryValue;
      BBState.ExitIsSetup = BBState.EntryIsSetup;
    }

    // Update stack state by checking contents of MBB.
    for (const auto &I : *MBB) {
      if (I.getOpcode() == FrameSetupOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I.getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (BBState.ExitIsSetup)
          report("FrameSetup is after another FrameSetup", &I);
        BBState.ExitValue -= Size;
        BBState.ExitIsSetup = true;
      }

      if (I.getOpcode() == FrameDestroyOpcode) {
        // The first operand of a FrameOpcode should be i32.
        int Size = I.getOperand(0).getImm();
        assert(Size >= 0 &&
               "Value should be non-negative in FrameSetup and FrameDestroy.\n");

        if (!BBState.ExitIsSetup)
          report("FrameDestroy is not after a FrameSetup", &I);
        int AbsSPAdj = BBState.ExitValue < 0 ? -BBState.ExitValue :
          BBState.ExitValue;
        if (BBState.ExitIsSetup && AbsSPAdj != Size) {
          report("FrameDestroy <n> is after FrameSetup <m>", &I);
          errs() << "FrameDestroy <" << Size << "> is after FrameSetup <"
                 << AbsSPAdj << ">.\n";
        }
        BBState.ExitValue += Size;
        BBState.ExitIsSetup = false;
      }
    }
    SPState[MBB->getNumber()] = BBState;

    // Make sure the exit state of any predecessor is consistent with the entry
    // state.
    for (MachineBasicBlock::const_pred_iterator I = MBB->pred_begin(),
         E = MBB->pred_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].ExitValue != BBState.EntryValue ||
           SPState[(*I)->getNumber()].ExitIsSetup != BBState.EntryIsSetup)) {
        report("The exit stack state of a predecessor is inconsistent.", MBB);
        errs() << "Predecessor BB#" << (*I)->getNumber() << " has exit state ("
               << SPState[(*I)->getNumber()].ExitValue << ", "
               << SPState[(*I)->getNumber()].ExitIsSetup
               << "), while BB#" << MBB->getNumber() << " has entry state ("
               << BBState.EntryValue << ", " << BBState.EntryIsSetup << ").\n";
      }
    }

    // Make sure the entry state of any successor is consistent with the exit
    // state.
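    // Successors already visited by the DFS have their entry state recorded
    // in SPState; a mismatch means two CFG paths reach the same block with
    // different pending stack adjustments.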
    for (MachineBasicBlock::const_succ_iterator I = MBB->succ_begin(),
         E = MBB->succ_end(); I != E; ++I) {
      if (Reachable.count(*I) &&
          (SPState[(*I)->getNumber()].EntryValue != BBState.ExitValue ||
           SPState[(*I)->getNumber()].EntryIsSetup != BBState.ExitIsSetup)) {
        report("The entry stack state of a successor is inconsistent.", MBB);
        errs() << "Successor BB#" << (*I)->getNumber() << " has entry state ("
               << SPState[(*I)->getNumber()].EntryValue << ", "
               << SPState[(*I)->getNumber()].EntryIsSetup
               << "), while BB#" << MBB->getNumber() << " has exit state ("
               << BBState.ExitValue << ", " << BBState.ExitIsSetup << ").\n";
      }
    }

    // Make sure a basic block with return ends with zero stack adjustment.
    if (!MBB->empty() && MBB->back().isReturn()) {
      if (BBState.ExitIsSetup)
        report("A return block ends with a FrameSetup.", MBB);
      if (BBState.ExitValue)
        report("A return block ends with a nonzero stack adjustment.", MBB);
    }
  }
}