//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveRegUnits.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <limits>
#include <string>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");

/// Mark the lanes in \p LaneMask of \p Reg as live at the scavenger's current
/// position.
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}

/// (Re-)initialize scavenger state for \p MBB. Does not seed any liveness;
/// callers (enterBasicBlock / enterBasicBlockEnd) add live-ins or live-outs.
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  // The number of register units is a property of the target; it must not
  // change between blocks of the same function.
  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  // Forget any registers scavenged in a previously processed block.
  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}

/// Start tracking liveness from the beginning of \p MBB (forward scavenging).
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}

/// Start tracking liveness from the end of \p MBB (backward scavenging).
void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveOuts(MBB);

  // Move internal iterator to the last instruction of the block.
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

/// Set the bits of all register units of \p Reg in \p BV.
void RegScavenger::addRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

/// Clear the bits of all register units of \p Reg in \p BV.
void RegScavenger::removeRegUnits(BitVector &BV, MCRegister Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}

/// Populate KillRegUnits / DefRegUnits from the operands of the instruction
/// at the current position (used by forward()).
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      // Translate the register mask into the set of clobbered register units:
      // a unit is clobbered if all of its roots are clobbered by the mask.
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    if (!MO.getReg().isPhysical() || isReserved(MO.getReg()))
      continue;
    MCRegister Reg = MO.getReg().asMCReg();

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

/// Advance the scavenger by one instruction, updating the set of live
/// register units and expiring scavenge-slot reservations that are restored
/// at the new position.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // A scavenged register is available again once we reach its restore point.
  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
       IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  // Debug instructions do not affect liveness.
  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

/// Step the scavenger back by one instruction, updating liveness and
/// expiring scavenge-slot reservations restored at the current instruction.
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

/// Return true if \p Reg is live at the current position. Reserved registers
/// are reported according to \p includeReserved.
bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}

/// Return the first register of class \p RC that is unused at the current
/// position, or 0 if every register of the class is in use.
Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (Register Reg : *RC) {
    if (!isRegUsed(Reg)) {
      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
                        << "\n");
      return Reg;
    }
  }
  return 0;
}

/// Return a bitvector (indexed by register number) of the registers in \p RC
/// that are unused at the current position.
BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (Register Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

/// Scan forward from \p StartMI (up to \p InstrLimit instructions) and pick
/// the candidate register whose next use/clobber is furthest away. \p UseMI
/// receives the position where the scavenged value must be restored.
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}

/// Given the bitvector \p Available of free register units at position
/// \p From. Search backwards to find a register that is part of \p
/// Candidates and not used/clobbered until the point \p To. If there are
/// multiple candidates continue searching and pick the one that is not used/
/// clobbered for the longest time.
/// Returns the register and the earliest position we know it to be free or
/// the position MBB.end() if no register is available.
359 static std::pair<MCPhysReg, MachineBasicBlock::iterator> 360 findSurvivorBackwards(const MachineRegisterInfo &MRI, 361 MachineBasicBlock::iterator From, MachineBasicBlock::iterator To, 362 const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder, 363 bool RestoreAfter) { 364 bool FoundTo = false; 365 MCPhysReg Survivor = 0; 366 MachineBasicBlock::iterator Pos; 367 MachineBasicBlock &MBB = *From->getParent(); 368 unsigned InstrLimit = 25; 369 unsigned InstrCountDown = InstrLimit; 370 const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo(); 371 LiveRegUnits Used(TRI); 372 373 for (MachineBasicBlock::iterator I = From;; --I) { 374 const MachineInstr &MI = *I; 375 376 Used.accumulate(MI); 377 378 if (I == To) { 379 // See if one of the registers in RC wasn't used so far. 380 for (MCPhysReg Reg : AllocationOrder) { 381 if (!MRI.isReserved(Reg) && Used.available(Reg) && 382 LiveOut.available(Reg)) 383 return std::make_pair(Reg, MBB.end()); 384 } 385 // Otherwise we will continue up to InstrLimit instructions to find 386 // the register which is not defined/used for the longest time. 387 FoundTo = true; 388 Pos = To; 389 // Note: It was fine so far to start our search at From, however now that 390 // we have to spill, and can only place the restore after From then 391 // add the regs used/defed by std::next(From) to the set. 392 if (RestoreAfter) 393 Used.accumulate(*std::next(From)); 394 } 395 if (FoundTo) { 396 if (Survivor == 0 || !Used.available(Survivor)) { 397 MCPhysReg AvilableReg = 0; 398 for (MCPhysReg Reg : AllocationOrder) { 399 if (!MRI.isReserved(Reg) && Used.available(Reg)) { 400 AvilableReg = Reg; 401 break; 402 } 403 } 404 if (AvilableReg == 0) 405 break; 406 Survivor = AvilableReg; 407 } 408 if (--InstrCountDown == 0) 409 break; 410 411 // Keep searching when we find a vreg since the spilled register will 412 // be usefull for this other vreg as well later. 
413 bool FoundVReg = false; 414 for (const MachineOperand &MO : MI.operands()) { 415 if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) { 416 FoundVReg = true; 417 break; 418 } 419 } 420 if (FoundVReg) { 421 InstrCountDown = InstrLimit; 422 Pos = I; 423 } 424 if (I == MBB.begin()) 425 break; 426 } 427 } 428 429 return std::make_pair(Survivor, Pos); 430 } 431 432 static unsigned getFrameIndexOperandNum(MachineInstr &MI) { 433 unsigned i = 0; 434 while (!MI.getOperand(i).isFI()) { 435 ++i; 436 assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!"); 437 } 438 return i; 439 } 440 441 RegScavenger::ScavengedInfo & 442 RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj, 443 MachineBasicBlock::iterator Before, 444 MachineBasicBlock::iterator &UseMI) { 445 // Find an available scavenging slot with size and alignment matching 446 // the requirements of the class RC. 447 const MachineFunction &MF = *Before->getMF(); 448 const MachineFrameInfo &MFI = MF.getFrameInfo(); 449 unsigned NeedSize = TRI->getSpillSize(RC); 450 Align NeedAlign = TRI->getSpillAlign(RC); 451 452 unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max(); 453 int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd(); 454 for (unsigned I = 0; I < Scavenged.size(); ++I) { 455 if (Scavenged[I].Reg != 0) 456 continue; 457 // Verify that this slot is valid for this register. 458 int FI = Scavenged[I].FrameIndex; 459 if (FI < FIB || FI >= FIE) 460 continue; 461 unsigned S = MFI.getObjectSize(FI); 462 Align A = MFI.getObjectAlign(FI); 463 if (NeedSize > S || NeedAlign > A) 464 continue; 465 // Avoid wasting slots with large size and/or large alignment. Pick one 466 // that is the best fit for this register class (in street metric). 467 // Picking a larger slot than necessary could happen if a slot for a 468 // larger register is reserved before a slot for a smaller one. 
When 469 // trying to spill a smaller register, the large slot would be found 470 // first, thus making it impossible to spill the larger register later. 471 unsigned D = (S - NeedSize) + (A.value() - NeedAlign.value()); 472 if (D < Diff) { 473 SI = I; 474 Diff = D; 475 } 476 } 477 478 if (SI == Scavenged.size()) { 479 // We need to scavenge a register but have no spill slot, the target 480 // must know how to do it (if not, we'll assert below). 481 Scavenged.push_back(ScavengedInfo(FIE)); 482 } 483 484 // Avoid infinite regress 485 Scavenged[SI].Reg = Reg; 486 487 // If the target knows how to save/restore the register, let it do so; 488 // otherwise, use the emergency stack spill slot. 489 if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) { 490 // Spill the scavenged register before \p Before. 491 int FI = Scavenged[SI].FrameIndex; 492 if (FI < FIB || FI >= FIE) { 493 std::string Msg = std::string("Error while trying to spill ") + 494 TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) + 495 ": Cannot scavenge register without an emergency spill slot!"; 496 report_fatal_error(Msg.c_str()); 497 } 498 TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex, 499 &RC, TRI); 500 MachineBasicBlock::iterator II = std::prev(Before); 501 502 unsigned FIOperandNum = getFrameIndexOperandNum(*II); 503 TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this); 504 505 // Restore the scavenged register before its use (or first terminator). 
506 TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex, 507 &RC, TRI); 508 II = std::prev(UseMI); 509 510 FIOperandNum = getFrameIndexOperandNum(*II); 511 TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this); 512 } 513 return Scavenged[SI]; 514 } 515 516 Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC, 517 MachineBasicBlock::iterator I, 518 int SPAdj, bool AllowSpill) { 519 MachineInstr &MI = *I; 520 const MachineFunction &MF = *MI.getMF(); 521 // Consider all allocatable registers in the register class initially 522 BitVector Candidates = TRI->getAllocatableSet(MF, RC); 523 524 // Exclude all the registers being used by the instruction. 525 for (const MachineOperand &MO : MI.operands()) { 526 if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) && 527 !Register::isVirtualRegister(MO.getReg())) 528 for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI) 529 Candidates.reset(*AI); 530 } 531 532 // Try to find a register that's unused if there is one, as then we won't 533 // have to spill. 534 BitVector Available = getRegsAvailable(RC); 535 Available &= Candidates; 536 if (Available.any()) 537 Candidates = Available; 538 539 // Find the register whose use is furthest away. 540 MachineBasicBlock::iterator UseMI; 541 Register SReg = findSurvivorReg(I, Candidates, 25, UseMI); 542 543 // If we found an unused register there is no reason to spill it. 
544 if (!isRegUsed(SReg)) { 545 LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n"); 546 return SReg; 547 } 548 549 if (!AllowSpill) 550 return 0; 551 552 ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI); 553 Scavenged.Restore = &*std::prev(UseMI); 554 555 LLVM_DEBUG(dbgs() << "Scavenged register (with spill): " 556 << printReg(SReg, TRI) << "\n"); 557 558 return SReg; 559 } 560 561 Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC, 562 MachineBasicBlock::iterator To, 563 bool RestoreAfter, int SPAdj, 564 bool AllowSpill) { 565 const MachineBasicBlock &MBB = *To->getParent(); 566 const MachineFunction &MF = *MBB.getParent(); 567 568 // Find the register whose use is furthest away. 569 MachineBasicBlock::iterator UseMI; 570 ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF); 571 std::pair<MCPhysReg, MachineBasicBlock::iterator> P = 572 findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder, 573 RestoreAfter); 574 MCPhysReg Reg = P.first; 575 MachineBasicBlock::iterator SpillBefore = P.second; 576 // Found an available register? 577 if (Reg != 0 && SpillBefore == MBB.end()) { 578 LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI) 579 << '\n'); 580 return Reg; 581 } 582 583 if (!AllowSpill) 584 return 0; 585 586 assert(Reg != 0 && "No register left to scavenge!"); 587 588 MachineBasicBlock::iterator ReloadAfter = 589 RestoreAfter ? 
std::next(MBBI) : MBBI; 590 MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter); 591 if (ReloadBefore != MBB.end()) 592 LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n'); 593 ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore); 594 Scavenged.Restore = &*std::prev(SpillBefore); 595 LiveUnits.removeReg(Reg); 596 LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI) 597 << " until " << *SpillBefore); 598 return Reg; 599 } 600 601 /// Allocate a register for the virtual register \p VReg. The last use of 602 /// \p VReg is around the current position of the register scavenger \p RS. 603 /// \p ReserveAfter controls whether the scavenged register needs to be reserved 604 /// after the current instruction, otherwise it will only be reserved before the 605 /// current instruction. 606 static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS, 607 Register VReg, bool ReserveAfter) { 608 const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo(); 609 #ifndef NDEBUG 610 // Verify that all definitions and uses are in the same basic block. 611 const MachineBasicBlock *CommonMBB = nullptr; 612 // Real definition for the reg, re-definitions are not considered. 613 const MachineInstr *RealDef = nullptr; 614 for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) { 615 MachineBasicBlock *MBB = MO.getParent()->getParent(); 616 if (CommonMBB == nullptr) 617 CommonMBB = MBB; 618 assert(MBB == CommonMBB && "All defs+uses must be in the same basic block"); 619 if (MO.isDef()) { 620 const MachineInstr &MI = *MO.getParent(); 621 if (!MI.readsRegister(VReg, &TRI)) { 622 assert((!RealDef || RealDef == &MI) && 623 "Can have at most one definition which is not a redefinition"); 624 RealDef = &MI; 625 } 626 } 627 } 628 assert(RealDef != nullptr && "Must have at least 1 Def"); 629 #endif 630 631 // We should only have one definition of the register. 
However to accommodate 632 // the requirements of two address code we also allow definitions in 633 // subsequent instructions provided they also read the register. That way 634 // we get a single contiguous lifetime. 635 // 636 // Definitions in MRI.def_begin() are unordered, search for the first. 637 MachineRegisterInfo::def_iterator FirstDef = 638 std::find_if(MRI.def_begin(VReg), MRI.def_end(), 639 [VReg, &TRI](const MachineOperand &MO) { 640 return !MO.getParent()->readsRegister(VReg, &TRI); 641 }); 642 assert(FirstDef != MRI.def_end() && 643 "Must have one definition that does not redefine vreg"); 644 MachineInstr &DefMI = *FirstDef->getParent(); 645 646 // The register scavenger will report a free register inserting an emergency 647 // spill/reload if necessary. 648 int SPAdj = 0; 649 const TargetRegisterClass &RC = *MRI.getRegClass(VReg); 650 Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(), 651 ReserveAfter, SPAdj); 652 MRI.replaceRegWith(VReg, SReg); 653 ++NumScavengedRegs; 654 return SReg; 655 } 656 657 /// Allocate (scavenge) vregs inside a single basic block. 658 /// Returns true if the target spill callback created new vregs and a 2nd pass 659 /// is necessary. 660 static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI, 661 RegScavenger &RS, 662 MachineBasicBlock &MBB) { 663 const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo(); 664 RS.enterBasicBlockEnd(MBB); 665 666 unsigned InitialNumVirtRegs = MRI.getNumVirtRegs(); 667 bool NextInstructionReadsVReg = false; 668 for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) { 669 --I; 670 // Move RegScavenger to the position between *I and *std::next(I). 671 RS.backward(I); 672 673 // Look for unassigned vregs in the uses of *std::next(I). 
674 if (NextInstructionReadsVReg) { 675 MachineBasicBlock::iterator N = std::next(I); 676 const MachineInstr &NMI = *N; 677 for (const MachineOperand &MO : NMI.operands()) { 678 if (!MO.isReg()) 679 continue; 680 Register Reg = MO.getReg(); 681 // We only care about virtual registers and ignore virtual registers 682 // created by the target callbacks in the process (those will be handled 683 // in a scavenging round). 684 if (!Register::isVirtualRegister(Reg) || 685 Register::virtReg2Index(Reg) >= InitialNumVirtRegs) 686 continue; 687 if (!MO.readsReg()) 688 continue; 689 690 Register SReg = scavengeVReg(MRI, RS, Reg, true); 691 N->addRegisterKilled(SReg, &TRI, false); 692 RS.setRegUsed(SReg); 693 } 694 } 695 696 // Look for unassigned vregs in the defs of *I. 697 NextInstructionReadsVReg = false; 698 const MachineInstr &MI = *I; 699 for (const MachineOperand &MO : MI.operands()) { 700 if (!MO.isReg()) 701 continue; 702 Register Reg = MO.getReg(); 703 // Only vregs, no newly created vregs (see above). 704 if (!Register::isVirtualRegister(Reg) || 705 Register::virtReg2Index(Reg) >= InitialNumVirtRegs) 706 continue; 707 // We have to look at all operands anyway so we can precalculate here 708 // whether there is a reading operand. This allows use to skip the use 709 // step in the next iteration if there was none. 
710 assert(!MO.isInternalRead() && "Cannot assign inside bundles"); 711 assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses"); 712 if (MO.readsReg()) { 713 NextInstructionReadsVReg = true; 714 } 715 if (MO.isDef()) { 716 Register SReg = scavengeVReg(MRI, RS, Reg, false); 717 I->addRegisterDead(SReg, &TRI, false); 718 } 719 } 720 } 721 #ifndef NDEBUG 722 for (const MachineOperand &MO : MBB.front().operands()) { 723 if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg())) 724 continue; 725 assert(!MO.isInternalRead() && "Cannot assign inside bundles"); 726 assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses"); 727 assert(!MO.readsReg() && "Vreg use in first instruction not allowed"); 728 } 729 #endif 730 731 return MRI.getNumVirtRegs() != InitialNumVirtRegs; 732 } 733 734 void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) { 735 // FIXME: Iterating over the instruction stream is unnecessary. We can simply 736 // iterate over the vreg use list, which at this point only contains machine 737 // operands for which eliminateFrameIndex need a new scratch reg. 738 MachineRegisterInfo &MRI = MF.getRegInfo(); 739 // Shortcut. 740 if (MRI.getNumVirtRegs() == 0) { 741 MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs); 742 return; 743 } 744 745 // Run through the instructions and find any virtual registers. 746 for (MachineBasicBlock &MBB : MF) { 747 if (MBB.empty()) 748 continue; 749 750 bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB); 751 if (Again) { 752 LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block " 753 << MBB.getName() << '\n'); 754 Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB); 755 // The target required a 2nd run (because it created new vregs while 756 // spilling). Refuse to do another pass to keep compiletime in check. 
757 if (Again) 758 report_fatal_error("Incomplete scavenging after 2nd pass"); 759 } 760 } 761 762 MRI.clearVirtRegs(); 763 MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs); 764 } 765 766 namespace { 767 768 /// This class runs register scavenging independ of the PrologEpilogInserter. 769 /// This is used in for testing. 770 class ScavengerTest : public MachineFunctionPass { 771 public: 772 static char ID; 773 774 ScavengerTest() : MachineFunctionPass(ID) {} 775 776 bool runOnMachineFunction(MachineFunction &MF) override { 777 const TargetSubtargetInfo &STI = MF.getSubtarget(); 778 const TargetFrameLowering &TFL = *STI.getFrameLowering(); 779 780 RegScavenger RS; 781 // Let's hope that calling those outside of PrologEpilogueInserter works 782 // well enough to initialize the scavenger with some emergency spillslots 783 // for the target. 784 BitVector SavedRegs; 785 TFL.determineCalleeSaves(MF, SavedRegs, &RS); 786 TFL.processFunctionBeforeFrameFinalized(MF, &RS); 787 788 // Let's scavenge the current function 789 scavengeFrameVirtualRegs(MF, RS); 790 return true; 791 } 792 }; 793 794 } // end anonymous namespace 795 796 char ScavengerTest::ID; 797 798 INITIALIZE_PASS(ScavengerTest, "scavenger-test", 799 "Scavenge virtual registers inside basic blocks", false, false) 800