//===-- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map ----------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the VirtRegMap class.
//
// It also contains implementations of the Spiller interface, which, given a
// virtual register map and a machine function, eliminates all virtual
// references by replacing them with physical register references - adding spill
// code as necessary.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/VirtRegMap.h"
#include "LiveDebugVariables.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/LiveIntervalAnalysis.h"
#include "llvm/CodeGen/LiveStackAnalysis.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumSpillSlots, "Number of spill slots allocated");
STATISTIC(NumIdCopies, "Number of identity moves eliminated after rewriting");

//===----------------------------------------------------------------------===//
//  VirtRegMap implementation
//===----------------------------------------------------------------------===//

char VirtRegMap::ID = 0;

INITIALIZE_PASS(VirtRegMap, "virtregmap", "Virtual Register Map", false, false)

/// Cache the per-function target hooks, reset the three virtreg maps, and size
/// them for the current number of virtual registers. Returns false: this pass
/// only (re)initializes analysis state and never modifies the function.
bool VirtRegMap::runOnMachineFunction(MachineFunction &mf) {
  MRI = &mf.getRegInfo();
  TII = mf.getSubtarget().getInstrInfo();
  TRI = mf.getSubtarget().getRegisterInfo();
  MF = &mf;

  // Drop any mappings left over from a previous function.
  Virt2PhysMap.clear();
  Virt2StackSlotMap.clear();
  Virt2SplitMap.clear();

  grow();
  return false;
}

/// Resize all three maps so they can be indexed by every virtual register
/// currently known to MachineRegisterInfo.
void VirtRegMap::grow() {
  unsigned NumRegs = MF->getRegInfo().getNumVirtRegs();
  Virt2PhysMap.resize(NumRegs);
  Virt2StackSlotMap.resize(NumRegs);
  Virt2SplitMap.resize(NumRegs);
}

/// Create a new spill stack object sized and aligned for register class \p RC
/// and return its frame index. Bumps the NumSpillSlots statistic.
unsigned VirtRegMap::createSpillSlot(const TargetRegisterClass *RC) {
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int SS = MF->getFrameInfo().CreateSpillStackObject(Size, Align);
  ++NumSpillSlots;
  return SS;
}

/// Return true if \p VirtReg was assigned exactly the physical register its
/// simple allocation hint asks for. A virtual-register hint is resolved
/// through this map before comparing.
bool VirtRegMap::hasPreferredPhys(unsigned VirtReg) {
  unsigned Hint = MRI->getSimpleHint(VirtReg);
  if (!Hint)
    return false;
  if (TargetRegisterInfo::isVirtualRegister(Hint))
    Hint = getPhys(Hint);
  return getPhys(VirtReg) == Hint;
}

/// Return true if \p VirtReg has a hint that can already be resolved to a
/// concrete physical register: either the hint is itself a physreg, or it is
/// a virtreg that has been assigned one.
bool VirtRegMap::hasKnownPreference(unsigned VirtReg) {
  std::pair<unsigned, unsigned> Hint = MRI->getRegAllocationHint(VirtReg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint.second))
    return true;
  if (TargetRegisterInfo::isVirtualRegister(Hint.second))
    return hasPhys(Hint.second);
  return false;
}

/// Allocate a fresh spill slot for \p virtReg (which must not already have
/// one) and record the mapping. Returns the new frame index.
int VirtRegMap::assignVirt2StackSlot(unsigned virtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF->getRegInfo().getRegClass(virtReg);
  return Virt2StackSlotMap[virtReg] = createSpillSlot(RC);
}

/// Record a pre-existing stack slot \p SS for \p virtReg. Negative indices
/// denote fixed frame objects; they must not precede the frame's first object
/// index. (Note the first disjunct accepts all non-negative indices, so the
/// lower-bound check only constrains fixed/negative ones.)
void VirtRegMap::assignVirt2StackSlot(unsigned virtReg, int SS) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((SS >= 0 ||
          (SS >= MF->getFrameInfo().getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg] = SS;
}

/// Print the current virtreg -> physreg assignments followed by the
/// virtreg -> stack-slot assignments, one entry per line, each tagged with the
/// virtual register's class name.
void VirtRegMap::print(raw_ostream &OS, const Module*) const {
  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2PhysMap[Reg] != (unsigned)VirtRegMap::NO_PHYS_REG) {
      OS << '[' << PrintReg(Reg, TRI) << " -> "
         << PrintReg(Virt2PhysMap[Reg], TRI) << "] "
         << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2StackSlotMap[Reg] != VirtRegMap::NO_STACK_SLOT) {
      OS << '[' << PrintReg(Reg, TRI) << " -> fi#" << Virt2StackSlotMap[Reg]
         << "] " << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }
  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
// Debug-build helper: dump the register map to the debug stream.
LLVM_DUMP_METHOD void VirtRegMap::dump() const {
  print(dbgs());
}
#endif

//===----------------------------------------------------------------------===//
//                              VirtRegRewriter
//===----------------------------------------------------------------------===//
//
// The VirtRegRewriter is the last of the register allocator passes.
// It rewrites virtual registers to physical registers as specified in the
// VirtRegMap analysis. It also updates live-in information on basic blocks
// according to LiveIntervals.
//
namespace {
/// Final pass of the register allocation pipeline: replaces every virtual
/// register operand with the physical register assigned by VirtRegMap, adds
/// block live-in lists from LiveIntervals, expands copy bundles, and removes
/// identity copies.
class VirtRegRewriter : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetMachine *TM;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  MachineRegisterInfo *MRI;
  SlotIndexes *Indexes;
  LiveIntervals *LIS;
  VirtRegMap *VRM;

  void rewrite();
  void addMBBLiveIns();
  bool readsUndefSubreg(const MachineOperand &MO) const;
  void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
  void handleIdentityCopy(MachineInstr &MI) const;
  void expandCopyBundle(MachineInstr &MI) const;

public:
  static char ID;
  VirtRegRewriter() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction&) override;
  // After this pass runs, the function contains no virtual registers.
  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};
} // end anonymous namespace

char &llvm::VirtRegRewriterID = VirtRegRewriter::ID;

INITIALIZE_PASS_BEGIN(VirtRegRewriter, "virtregrewriter",
                      "Virtual Register Rewriter", false, false)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(LiveDebugVariables)
INITIALIZE_PASS_DEPENDENCY(LiveStacks)
INITIALIZE_PASS_DEPENDENCY(VirtRegMap)
INITIALIZE_PASS_END(VirtRegRewriter, "virtregrewriter",
                    "Virtual Register Rewriter", false, false)

char VirtRegRewriter::ID = 0;

/// Declare required/preserved analyses. SlotIndexes and LiveStacks survive the
/// rewrite; LiveIntervals, LiveDebugVariables, and VirtRegMap are consumed.
void VirtRegRewriter::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addRequired<LiveStacks>();
  AU.addRequired<VirtRegMap>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

/// Drive the rewrite: add kill flags, compute live-in lists, substitute
/// physical registers, emit DBG_VALUEs, then drop all virtual-register state.
/// Always returns true (the function is modified).
bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  TM = &MF->getTarget();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI = &MF->getRegInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  LIS = &getAnalysis<LiveIntervals>();
  VRM = &getAnalysis<VirtRegMap>();
  DEBUG(dbgs() << "********** REWRITE VIRTUAL REGISTERS **********\n"
               << "********** Function: "
               << MF->getName() << '\n');
  DEBUG(VRM->dump());

  // Add kill flags while we still have virtual registers.
  LIS->addKillFlags(VRM);

  // Live-in lists on basic blocks are required for physregs.
  addMBBLiveIns();

  // Rewrite virtual registers.
  rewrite();

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers and release all the transient data.
  VRM->clearAllVirt();
  MRI->clearVirtRegs();
  return true;
}

/// For an interval with subregister liveness, add \p PhysReg as a live-in
/// (with the precise lane mask) to every basic block whose start index falls
/// inside one of the subranges of \p LI.
void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
                                             unsigned PhysReg) const {
  assert(!LI.empty());
  assert(LI.hasSubRanges());

  typedef std::pair<const LiveInterval::SubRange *,
                    LiveInterval::const_iterator> SubRangeIteratorPair;
  SmallVector<SubRangeIteratorPair, 4> SubRanges;
  // [First, Last] bounds the union of all subrange segments, so only MBB
  // start positions inside that window need to be inspected.
  SlotIndex First;
  SlotIndex Last;
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    SubRanges.push_back(std::make_pair(&SR, SR.begin()));
    if (!First.isValid() || SR.segments.front().start < First)
      First = SR.segments.front().start;
    if (!Last.isValid() || SR.segments.back().end > Last)
      Last = SR.segments.back().end;
  }

  // Check all mbb start positions between First and Last while
  // simultaneously advancing an iterator for each subrange.
  for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
       MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
    SlotIndex MBBBegin = MBBI->first;
    // Advance all subrange iterators so that their end position is just
    // behind MBBBegin (or the iterator is at the end).
    LaneBitmask LaneMask;
    for (auto &RangeIterPair : SubRanges) {
      const LiveInterval::SubRange *SR = RangeIterPair.first;
      LiveInterval::const_iterator &SRI = RangeIterPair.second;
      while (SRI != SR->end() && SRI->end <= MBBBegin)
        ++SRI;
      if (SRI == SR->end())
        continue;
      // The current segment covers the block entry: these lanes are live-in.
      if (SRI->start <= MBBBegin)
        LaneMask |= SR->LaneMask;
    }
    if (LaneMask.none())
      continue;
    MachineBasicBlock *MBB = MBBI->second;
    MBB->addLiveIn(PhysReg, LaneMask);
  }
}

// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
  for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
    unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
    // Skip virtregs with no remaining non-debug uses or defs.
    if (MRI->reg_nodbg_empty(VirtReg))
      continue;
    LiveInterval &LI = LIS->getInterval(VirtReg);
    // A register confined to one block can never be live-in anywhere.
    if (LI.empty() || LIS->intervalIsInOneMBB(LI))
      continue;
    // This is a virtual register that is live across basic blocks. Its
    // assigned PhysReg must be marked as live-in to those blocks.
    unsigned PhysReg = VRM->getPhys(VirtReg);
    assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");

    if (LI.hasSubRanges()) {
      addLiveInsForSubRanges(LI, PhysReg);
    } else {
      // Go over MBB begin positions and see if we have segments covering them.
      // The following works because segments and the MBBIndex list are both
      // sorted by slot indexes.
      SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
      for (const auto &Seg : LI) {
        I = Indexes->advanceMBBIndex(I, Seg.start);
        for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
          MachineBasicBlock *MBB = I->second;
          MBB->addLiveIn(PhysReg);
        }
      }
    }
  }

  // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
  // each MBB's LiveIns set before calling addLiveIn on them.
  for (MachineBasicBlock &MBB : *MF)
    MBB.sortUniqueLiveIns();
}

/// Returns true if the given machine operand \p MO only reads undefined lanes.
/// The function only works for use operands with a subregister set.
bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
  // Shortcut if the operand is already marked undef.
  if (MO.isUndef())
    return true;

  unsigned Reg = MO.getReg();
  const LiveInterval &LI = LIS->getInterval(Reg);
  const MachineInstr &MI = *MO.getParent();
  SlotIndex BaseIndex = LIS->getInstructionIndex(MI);
  // This code is only meant to handle reading undefined subregisters which
  // we couldn't properly detect before.
  assert(LI.liveAt(BaseIndex) &&
         "Reads of completely dead register should be marked undef already");
  unsigned SubRegIdx = MO.getSubReg();
  assert(SubRegIdx != 0 && LI.hasSubRanges());
  LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
  // See if any of the relevant subregister liveranges is defined at this
  // point; if so, the read is of a defined value.
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    if ((SR.LaneMask & UseMask).any() && SR.liveAt(BaseIndex))
      return false;
  }
  return true;
}

/// If \p MI is a copy whose source and destination ended up as the same
/// physical register, either turn it into a KILL (when it still carries
/// liveness information) or delete it outright.
void VirtRegRewriter::handleIdentityCopy(MachineInstr &MI) const {
  if (!MI.isIdentityCopy())
    return;
  DEBUG(dbgs() << "Identity copy: " << MI);
  ++NumIdCopies;

  // Copies like:
  //    %R0 = COPY %R0<undef>
  //    %AL = COPY %AL, %EAX<imp-def>
  // give us additional liveness information: The target (super-)register
  // must not be valid before this point. Replace the COPY with a KILL
  // instruction to maintain this information.
  if (MI.getOperand(0).isUndef() || MI.getNumOperands() > 2) {
    MI.setDesc(TII->get(TargetOpcode::KILL));
    DEBUG(dbgs() << "  replace by: " << MI);
    return;
  }

  // The copy carries no extra information: remove it from the slot index
  // maps (if we have them) and from its bundle/block.
  if (Indexes)
    Indexes->removeSingleMachineInstrFromMaps(MI);
  MI.eraseFromBundle();
  DEBUG(dbgs() << "  deleted.\n");
}

/// The liverange splitting logic sometimes produces bundles of copies when
/// subregisters are involved. Expand these into a sequence of copy instructions
/// after processing the last in the bundle. Does not update LiveIntervals
/// which we shouldn't need for this instruction anymore.
void VirtRegRewriter::expandCopyBundle(MachineInstr &MI) const {
  if (!MI.isCopy())
    return;

  // Act only once, on the last instruction of the bundle.
  if (MI.isBundledWithPred() && !MI.isBundledWithSucc()) {
    // Only do this when the complete bundle is made out of COPYs.
    MachineBasicBlock &MBB = *MI.getParent();
    for (MachineBasicBlock::reverse_instr_iterator I =
           std::next(MI.getReverseIterator()), E = MBB.instr_rend();
         I != E && I->isBundledWithSucc(); ++I) {
      if (!I->isCopy())
        return;
    }

    // Walk backwards through the bundle, detaching each copy from its
    // predecessor and giving it its own slot index.
    for (MachineBasicBlock::reverse_instr_iterator I = MI.getReverseIterator();
         I->isBundledWithPred(); ) {
      // NOTE: this local MI intentionally shadows the parameter; it refers to
      // the bundled instruction currently being split off.
      MachineInstr &MI = *I;
      ++I;

      MI.unbundleFromPred();
      if (Indexes)
        Indexes->insertMachineInstrInMaps(MI);
    }
  }
}

/// Walk every operand of every instruction and substitute the assigned
/// physical register for each virtual register, fixing up sub-register
/// indexes, undef/internal-read flags, and (when subregister liveness is
/// disabled) implicit super-register kill/def operands.
void VirtRegRewriter::rewrite() {
  bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
  // Deferred implicit super-register operands, added after the instruction's
  // explicit operands have all been rewritten.
  SmallVector<unsigned, 8> SuperDeads;
  SmallVector<unsigned, 8> SuperDefs;
  SmallVector<unsigned, 8> SuperKills;

  for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
       MBBI != MBBE; ++MBBI) {
    DEBUG(MBBI->print(dbgs(), Indexes));
    // Instruction iterator is advanced before processing so the current
    // instruction may be erased (see handleIdentityCopy) safely.
    for (MachineBasicBlock::instr_iterator
           MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
      MachineInstr *MI = &*MII;
      ++MII;

      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
           MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        MachineOperand &MO = *MOI;

        // Make sure MRI knows about registers clobbered by regmasks.
        if (MO.isRegMask())
          MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());

        if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
          continue;
        unsigned VirtReg = MO.getReg();
        unsigned PhysReg = VRM->getPhys(VirtReg);
        assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
               "Instruction uses unmapped VirtReg");
        assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");

        // Preserve semantics of sub-register operands.
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0) {
          if (NoSubRegLiveness) {
            // A virtual register kill refers to the whole register, so we may
            // have to add <imp-use,kill> operands for the super-register.  A
            // partial redef always kills and redefines the super-register.
            if (MO.readsReg() && (MO.isDef() || MO.isKill()))
              SuperKills.push_back(PhysReg);

            if (MO.isDef()) {
              // Also add implicit defs for the super-register.
              if (MO.isDead())
                SuperDeads.push_back(PhysReg);
              else
                SuperDefs.push_back(PhysReg);
            }
          } else {
            if (MO.isUse()) {
              if (readsUndefSubreg(MO))
                // We need to add an <undef> flag if the subregister is
                // completely undefined (and we are not adding super-register
                // defs).
                MO.setIsUndef(true);
            } else if (!MO.isDead()) {
              assert(MO.isDef());
            }
          }

          // The <def,undef> and <def,internal> flags only make sense for
          // sub-register defs, and we are substituting a full physreg.  An
          // <imp-use,kill> operand from the SuperKills list will represent the
          // partial read of the super-register.
          if (MO.isDef()) {
            MO.setIsUndef(false);
            MO.setIsInternalRead(false);
          }

          // PhysReg operands cannot have subregister indexes.
          PhysReg = TRI->getSubReg(PhysReg, SubReg);
          assert(PhysReg && "Invalid SubReg for physical register");
          MO.setSubReg(0);
        }
        // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
        // we need the inlining here.
        MO.setReg(PhysReg);
      }

      // Add any missing super-register kills after rewriting the whole
      // instruction.
      while (!SuperKills.empty())
        MI->addRegisterKilled(SuperKills.pop_back_val(), TRI, true);

      while (!SuperDeads.empty())
        MI->addRegisterDead(SuperDeads.pop_back_val(), TRI, true);

      while (!SuperDefs.empty())
        MI->addRegisterDefined(SuperDefs.pop_back_val(), TRI);

      DEBUG(dbgs() << "> " << *MI);

      expandCopyBundle(*MI);

      // We can remove identity copies right now.
      handleIdentityCopy(*MI);
    }
  }
}