//===-- RegisterScavenging.cpp - Machine register scavenging --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file implements the machine register scavenger. It can provide
/// information, such as unused registers, at any point in a machine basic
/// block. It also provides a mechanism to make registers available by evicting
/// them to spill slots.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/RegisterScavenging.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

#define DEBUG_TYPE "reg-scavenging"

void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
  for (MCRegUnitMaskIterator RUI(Reg, TRI); RUI.isValid(); ++RUI) {
    LaneBitmask UnitMask = (*RUI).second;
    if (UnitMask.none() || (LaneMask & UnitMask).any())
      RegUnitsAvailable.reset((*RUI).first);
  }
}

void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();

  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    RegUnitsAvailable.resize(NumRegUnits);
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    I->Reg = 0;
    I->Restore = nullptr;
  }

  // All register units start out unused.
  RegUnitsAvailable.set();

  // Pristine CSRs are not available.
  BitVector PR = MF.getFrameInfo().getPristineRegs(MF);
  for (int I = PR.find_first(); I > 0; I = PR.find_next(I))
    setRegUsed(I);

  Tracking = false;
}

void RegScavenger::setLiveInsUsed(const MachineBasicBlock &MBB) {
  for (const auto &LI : MBB.liveins())
    setRegUsed(LI.PhysReg, LI.LaneMask);
}

void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  setLiveInsUsed(MBB);
}

void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
  init(MBB);
  // Merge live-ins of successors to get live-outs.
  for (const MachineBasicBlock *Succ : MBB.successors())
    setLiveInsUsed(*Succ);

  // Move the internal iterator to the last instruction of the block.
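  // An empty block has no instruction to point at, so tracking stays off in
  // that case.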
  if (MBB.begin() != MBB.end()) {
    MBBI = std::prev(MBB.end());
    Tracking = true;
  }
}

void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}

void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}

void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugValue() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}

void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugValue()) {
    determineKillsAndDefs();

    // Commit the changes.
    setUsed(KillRegUnits);
    setUnused(DefRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  if (MI.isDebugValue())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partially live, e.g.:
        //   D0 = insert_subreg D0<undef>, S0
        //   ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
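        // Instead, only flag an error if no sub- or super-register of Reg is
        // live either.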
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}

void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  // Defined or clobbered registers are available now.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd;
           ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            RegUnitsAvailable.set(RU);
            break;
          }
        }
      }
    } else if (MO.isReg() && MO.isDef()) {
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isVirtualRegister(Reg) ||
          isReserved(Reg))
        continue;
      addRegUnits(RegUnitsAvailable, Reg);
    }
  }
  // Mark read registers as unavailable.
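  // (Walking backwards, a register read by this instruction must be live
  // before it, so its units become occupied again.)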
  for (const MachineOperand &MO : MI.uses()) {
    if (MO.isReg() && MO.readsReg()) {
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isVirtualRegister(Reg) ||
          isReserved(Reg))
        continue;
      removeRegUnits(RegUnitsAvailable, Reg);
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}

bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
  if (includeReserved && isReserved(Reg))
    return true;
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    if (!RegUnitsAvailable.test(*RUI))
      return true;
  return false;
}

unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
  for (unsigned Reg : *RC) {
    if (!isRegUsed(Reg)) {
      DEBUG(dbgs() << "Scavenger found unused reg: " << TRI->getName(Reg)
                   << "\n");
      return Reg;
    }
  }
  return 0;
}

BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
  BitVector Mask(TRI->getNumRegs());
  for (unsigned Reg : *RC)
    if (!isRegUsed(Reg))
      Mask.set(Reg);
  return Mask;
}

unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugValue()) {
      ++InstrLimit; // Don't count debug instructions.
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by the instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register.
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
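  // Hand the chosen restore point and the surviving register back to the
  // caller.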
  UseMI = RestorePointMI;
  return Survivor;
}

static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
  unsigned i = 0;
  while (!MI.getOperand(i).isFI()) {
    ++i;
    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
  }
  return i;
}

unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getParent()->getParent();
  // Consider all allocatable registers in the register class initially.
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    DEBUG(dbgs() << "Scavenged register: " << TRI->getName(SReg) << "\n");
    return SReg;
  }

  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = RC->getSize();
  unsigned NeedAlign = RC->getAlignment();

  unsigned SI = Scavenged.size(), Diff = UINT_MAX;
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in the street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S - NeedSize) + (A - NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot; the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress.
  Scavenged[SI].Reg = SReg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, I, UseMI, RC, SReg)) {
    // Spill the scavenged register before I.
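    // Without a frame object in range there is no emergency spill slot to
    // spill into, which is reported as a fatal error below.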
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(SReg) + " from class " + TRI->getRegClassName(RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, I, SReg, true, Scavenged[SI].FrameIndex,
                             RC, TRI);
    MachineBasicBlock::iterator II = std::prev(I);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, SReg, Scavenged[SI].FrameIndex,
                              RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }

  Scavenged[SI].Restore = &*std::prev(UseMI);

  // Doing this here leads to infinite regress.
  // Scavenged[SI].Reg = SReg;

  DEBUG(dbgs() << "Scavenged register (with spill): " << TRI->getName(SReg)
               << "\n");

  return SReg;
}