1 //===- llvm/CodeGen/VirtRegMap.cpp - Virtual Register Map -----------------===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file implements the VirtRegMap class. 11 // 12 // It also contains implementations of the Spiller interface, which, given a 13 // virtual register map and a machine function, eliminates all virtual 14 // references by replacing them with physical register references - adding spill 15 // code as necessary. 16 // 17 //===----------------------------------------------------------------------===// 18 19 #include "llvm/CodeGen/VirtRegMap.h" 20 #include "LiveDebugVariables.h" 21 #include "llvm/ADT/SmallVector.h" 22 #include "llvm/ADT/Statistic.h" 23 #include "llvm/CodeGen/LiveInterval.h" 24 #include "llvm/CodeGen/LiveIntervals.h" 25 #include "llvm/CodeGen/LiveStacks.h" 26 #include "llvm/CodeGen/MachineBasicBlock.h" 27 #include "llvm/CodeGen/MachineFrameInfo.h" 28 #include "llvm/CodeGen/MachineFunction.h" 29 #include "llvm/CodeGen/MachineFunctionPass.h" 30 #include "llvm/CodeGen/MachineInstr.h" 31 #include "llvm/CodeGen/MachineOperand.h" 32 #include "llvm/CodeGen/MachineRegisterInfo.h" 33 #include "llvm/CodeGen/SlotIndexes.h" 34 #include "llvm/CodeGen/TargetInstrInfo.h" 35 #include "llvm/CodeGen/TargetOpcodes.h" 36 #include "llvm/CodeGen/TargetRegisterInfo.h" 37 #include "llvm/CodeGen/TargetSubtargetInfo.h" 38 #include "llvm/Config/llvm-config.h" 39 #include "llvm/MC/LaneBitmask.h" 40 #include "llvm/Pass.h" 41 #include "llvm/Support/Compiler.h" 42 #include "llvm/Support/Debug.h" 43 #include "llvm/Support/raw_ostream.h" 44 #include <cassert> 45 #include <iterator> 46 #include <utility> 47 48 using namespace llvm; 49 50 #define DEBUG_TYPE "regalloc" 51 52 STATISTIC(NumSpillSlots, "Number of spill slots allocated"); 53 
STATISTIC(NumIdCopies, "Number of identity moves eliminated after rewriting");

//===----------------------------------------------------------------------===//
//  VirtRegMap implementation
//===----------------------------------------------------------------------===//

char VirtRegMap::ID = 0;

INITIALIZE_PASS(VirtRegMap, "virtregmap", "Virtual Register Map", false, false)

// Cache the function and its subtarget info, then reset and re-size the three
// virtreg maps for the current function. Returns false: this analysis never
// modifies the function itself.
bool VirtRegMap::runOnMachineFunction(MachineFunction &mf) {
  MRI = &mf.getRegInfo();
  TII = mf.getSubtarget().getInstrInfo();
  TRI = mf.getSubtarget().getRegisterInfo();
  MF = &mf;

  Virt2PhysMap.clear();
  Virt2StackSlotMap.clear();
  Virt2SplitMap.clear();

  grow();
  return false;
}

// Resize all three maps to cover every virtual register currently created in
// the function. Safe to call again after new virtual registers are added.
void VirtRegMap::grow() {
  unsigned NumRegs = MF->getRegInfo().getNumVirtRegs();
  Virt2PhysMap.resize(NumRegs);
  Virt2StackSlotMap.resize(NumRegs);
  Virt2SplitMap.resize(NumRegs);
}

// Record the assignment of \p physReg to \p virtReg. The virtual register must
// not be mapped yet, and the physical register must not be reserved.
void VirtRegMap::assignVirt2Phys(unsigned virtReg, MCPhysReg physReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg) &&
         TargetRegisterInfo::isPhysicalRegister(physReg));
  assert(Virt2PhysMap[virtReg] == NO_PHYS_REG &&
         "attempt to assign physical register to already mapped "
         "virtual register");
  assert(!getRegInfo().isReserved(physReg) &&
         "Attempt to map virtReg to a reserved physReg");
  Virt2PhysMap[virtReg] = physReg;
}

// Create a new spill slot on the stack frame, sized and aligned for register
// class \p RC, and count it in the NumSpillSlots statistic.
unsigned VirtRegMap::createSpillSlot(const TargetRegisterClass *RC) {
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int SS = MF->getFrameInfo().CreateSpillStackObject(Size, Align);
  ++NumSpillSlots;
  return SS;
}

// Return true if \p VirtReg ended up assigned to the physreg named by its
// simple allocation hint. A virtual-register hint is first translated through
// this map before the comparison.
bool VirtRegMap::hasPreferredPhys(unsigned VirtReg) {
  unsigned Hint = MRI->getSimpleHint(VirtReg);
  if (!Hint)
    return false;
  if (TargetRegisterInfo::isVirtualRegister(Hint))
    Hint = getPhys(Hint);
  return getPhys(VirtReg) == Hint;
}

// Return true if \p VirtReg has a hint that already names a concrete physreg:
// either the hint is a physical register, or it is a virtual register that has
// itself been assigned one.
bool VirtRegMap::hasKnownPreference(unsigned VirtReg) {
  std::pair<unsigned, unsigned> Hint = MRI->getRegAllocationHint(VirtReg);
  if (TargetRegisterInfo::isPhysicalRegister(Hint.second))
    return true;
  if (TargetRegisterInfo::isVirtualRegister(Hint.second))
    return hasPhys(Hint.second);
  return false;
}

// Allocate a fresh spill slot for \p virtReg (which must not already have one)
// and return the new frame index.
int VirtRegMap::assignVirt2StackSlot(unsigned virtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  const TargetRegisterClass* RC = MF->getRegInfo().getRegClass(virtReg);
  return Virt2StackSlotMap[virtReg] = createSpillSlot(RC);
}

// Map \p virtReg to the existing stack slot \p SS. \p SS may be a fixed
// (negative) frame index, as long as it lies within the frame's object range.
void VirtRegMap::assignVirt2StackSlot(unsigned virtReg, int SS) {
  assert(TargetRegisterInfo::isVirtualRegister(virtReg));
  assert(Virt2StackSlotMap[virtReg] == NO_STACK_SLOT &&
         "attempt to assign stack slot to already spilled register");
  assert((SS >= 0 ||
          (SS >= MF->getFrameInfo().getObjectIndexBegin())) &&
         "illegal fixed frame index");
  Virt2StackSlotMap[virtReg] = SS;
}

// Print the virt->phys map followed by the virt->stackslot map, one entry per
// line, for every virtual register that has a mapping.
void VirtRegMap::print(raw_ostream &OS, const Module*) const {
  OS << "********** REGISTER MAP **********\n";
  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2PhysMap[Reg] != (unsigned)VirtRegMap::NO_PHYS_REG) {
      OS << '[' << printReg(Reg, TRI) << " -> "
         << printReg(Virt2PhysMap[Reg], TRI) << "] "
         << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }

  for (unsigned i = 0, e = MRI->getNumVirtRegs(); i != e; ++i) {
    unsigned Reg = TargetRegisterInfo::index2VirtReg(i);
    if (Virt2StackSlotMap[Reg] != VirtRegMap::NO_STACK_SLOT) {
      OS << '[' << printReg(Reg, TRI) << " -> fi#" << Virt2StackSlotMap[Reg]
         << "] " << TRI->getRegClassName(MRI->getRegClass(Reg)) << "\n";
    }
  }
  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD
void VirtRegMap::dump() const {
  print(dbgs());
}
#endif

//===----------------------------------------------------------------------===//
//                              VirtRegRewriter
//===----------------------------------------------------------------------===//
//
// The VirtRegRewriter is the last of the register allocator passes.
// It rewrites virtual registers to physical registers as specified in the
// VirtRegMap analysis. It also updates live-in information on basic blocks
// according to LiveIntervals.
//
namespace {

class VirtRegRewriter : public MachineFunctionPass {
  MachineFunction *MF;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  MachineRegisterInfo *MRI;
  SlotIndexes *Indexes;
  LiveIntervals *LIS;
  VirtRegMap *VRM;

  void rewrite();
  void addMBBLiveIns();
  bool readsUndefSubreg(const MachineOperand &MO) const;
  void addLiveInsForSubRanges(const LiveInterval &LI, unsigned PhysReg) const;
  void handleIdentityCopy(MachineInstr &MI) const;
  void expandCopyBundle(MachineInstr &MI) const;
  bool subRegLiveThrough(const MachineInstr &MI, unsigned SuperPhysReg) const;

public:
  static char ID;

  VirtRegRewriter() : MachineFunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction&) override;

  // After this pass, no virtual registers remain in the function.
  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }
};

} // end anonymous namespace

char VirtRegRewriter::ID = 0;

char &llvm::VirtRegRewriterID = VirtRegRewriter::ID;

INITIALIZE_PASS_BEGIN(VirtRegRewriter, "virtregrewriter",
                      "Virtual Register Rewriter", false, false)
INITIALIZE_PASS_DEPENDENCY(SlotIndexes)
INITIALIZE_PASS_DEPENDENCY(LiveIntervals)
INITIALIZE_PASS_DEPENDENCY(LiveDebugVariables)
INITIALIZE_PASS_DEPENDENCY(LiveStacks)
INITIALIZE_PASS_DEPENDENCY(VirtRegMap)
INITIALIZE_PASS_END(VirtRegRewriter, "virtregrewriter",
                    "Virtual Register Rewriter", false, false)

// Declare the analyses this pass consumes. The CFG is not changed, and
// SlotIndexes / LiveStacks stay valid after the rewrite.
void VirtRegRewriter::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addRequired<LiveIntervals>();
  AU.addRequired<SlotIndexes>();
  AU.addPreserved<SlotIndexes>();
  AU.addRequired<LiveDebugVariables>();
  AU.addRequired<LiveStacks>();
  AU.addPreserved<LiveStacks>();
  AU.addRequired<VirtRegMap>();
  MachineFunctionPass::getAnalysisUsage(AU);
}

// Driver for the rewrite: add kill flags and block live-ins while virtual
// registers still exist, rewrite all operands to physregs, re-emit DBG_VALUEs,
// then drop every trace of the virtual registers.
bool VirtRegRewriter::runOnMachineFunction(MachineFunction &fn) {
  MF = &fn;
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI = &MF->getRegInfo();
  Indexes = &getAnalysis<SlotIndexes>();
  LIS = &getAnalysis<LiveIntervals>();
  VRM = &getAnalysis<VirtRegMap>();
  LLVM_DEBUG(dbgs() << "********** REWRITE VIRTUAL REGISTERS **********\n"
                    << "********** Function: " << MF->getName() << '\n');
  LLVM_DEBUG(VRM->dump());

  // Add kill flags while we still have virtual registers.
  LIS->addKillFlags(VRM);

  // Live-in lists on basic blocks are required for physregs.
  addMBBLiveIns();

  // Rewrite virtual registers.
  rewrite();

  // Write out new DBG_VALUE instructions.
  getAnalysis<LiveDebugVariables>().emitDebugValues(VRM);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers and release all the transient data.
  VRM->clearAllVirt();
  MRI->clearVirtRegs();
  return true;
}

// Add \p PhysReg, with a per-block lane mask, as a live-in to every basic
// block whose start position is covered by one of \p LI's subrange segments.
void VirtRegRewriter::addLiveInsForSubRanges(const LiveInterval &LI,
                                             unsigned PhysReg) const {
  assert(!LI.empty());
  assert(LI.hasSubRanges());

  using SubRangeIteratorPair =
      std::pair<const LiveInterval::SubRange *, LiveInterval::const_iterator>;

  SmallVector<SubRangeIteratorPair, 4> SubRanges;
  // [First, Last] bounds all subrange segments; only MBB starts inside this
  // window can possibly be covered.
  SlotIndex First;
  SlotIndex Last;
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    SubRanges.push_back(std::make_pair(&SR, SR.begin()));
    if (!First.isValid() || SR.segments.front().start < First)
      First = SR.segments.front().start;
    if (!Last.isValid() || SR.segments.back().end > Last)
      Last = SR.segments.back().end;
  }

  // Check all mbb start positions between First and Last while
  // simultaneously advancing an iterator for each subrange.
  for (SlotIndexes::MBBIndexIterator MBBI = Indexes->findMBBIndex(First);
       MBBI != Indexes->MBBIndexEnd() && MBBI->first <= Last; ++MBBI) {
    SlotIndex MBBBegin = MBBI->first;
    // Advance all subrange iterators so that their end position is just
    // behind MBBBegin (or the iterator is at the end).
    LaneBitmask LaneMask;
    for (auto &RangeIterPair : SubRanges) {
      const LiveInterval::SubRange *SR = RangeIterPair.first;
      LiveInterval::const_iterator &SRI = RangeIterPair.second;
      while (SRI != SR->end() && SRI->end <= MBBBegin)
        ++SRI;
      if (SRI == SR->end())
        continue;
      // The segment covers the block entry, so these lanes are live-in here.
      if (SRI->start <= MBBBegin)
        LaneMask |= SR->LaneMask;
    }
    // No lane of this vreg is live into the block; nothing to record.
    if (LaneMask.none())
      continue;
    MachineBasicBlock *MBB = MBBI->second;
    MBB->addLiveIn(PhysReg, LaneMask);
  }
}

// Compute MBB live-in lists from virtual register live ranges and their
// assignments.
void VirtRegRewriter::addMBBLiveIns() {
  for (unsigned Idx = 0, IdxE = MRI->getNumVirtRegs(); Idx != IdxE; ++Idx) {
    unsigned VirtReg = TargetRegisterInfo::index2VirtReg(Idx);
    // Skip virtual registers with no non-debug uses or defs.
    if (MRI->reg_nodbg_empty(VirtReg))
      continue;
    LiveInterval &LI = LIS->getInterval(VirtReg);
    // A register confined to a single block can never be live-in anywhere.
    if (LI.empty() || LIS->intervalIsInOneMBB(LI))
      continue;
    // This is a virtual register that is live across basic blocks. Its
    // assigned PhysReg must be marked as live-in to those blocks.
    unsigned PhysReg = VRM->getPhys(VirtReg);
    assert(PhysReg != VirtRegMap::NO_PHYS_REG && "Unmapped virtual register.");

    if (LI.hasSubRanges()) {
      addLiveInsForSubRanges(LI, PhysReg);
    } else {
      // Go over MBB begin positions and see if we have segments covering them.
      // The following works because segments and the MBBIndex list are both
      // sorted by slot indexes.
      SlotIndexes::MBBIndexIterator I = Indexes->MBBIndexBegin();
      for (const auto &Seg : LI) {
        I = Indexes->advanceMBBIndex(I, Seg.start);
        // Every block whose start falls inside [Seg.start, Seg.end) sees
        // PhysReg live on entry.
        for (; I != Indexes->MBBIndexEnd() && I->first < Seg.end; ++I) {
          MachineBasicBlock *MBB = I->second;
          MBB->addLiveIn(PhysReg);
        }
      }
    }
  }

  // Sort and unique MBB LiveIns as we've not checked if SubReg/PhysReg were in
  // each MBB's LiveIns set before calling addLiveIn on them.
  for (MachineBasicBlock &MBB : *MF)
    MBB.sortUniqueLiveIns();
}

/// Returns true if the given machine operand \p MO only reads undefined lanes.
/// The function only works for use operands with a subregister set.
bool VirtRegRewriter::readsUndefSubreg(const MachineOperand &MO) const {
  // Shortcut if the operand is already marked undef.
  if (MO.isUndef())
    return true;

  unsigned Reg = MO.getReg();
  const LiveInterval &LI = LIS->getInterval(Reg);
  const MachineInstr &MI = *MO.getParent();
  SlotIndex BaseIndex = LIS->getInstructionIndex(MI);
  // This code is only meant to handle reading undefined subregisters which
  // we couldn't properly detect before.
  assert(LI.liveAt(BaseIndex) &&
         "Reads of completely dead register should be marked undef already");
  unsigned SubRegIdx = MO.getSubReg();
  assert(SubRegIdx != 0 && LI.hasSubRanges());
  LaneBitmask UseMask = TRI->getSubRegIndexLaneMask(SubRegIdx);
  // See if any of the relevant subregister liveranges is defined at this point.
  for (const LiveInterval::SubRange &SR : LI.subranges()) {
    // Any overlapping lane that is live makes this a real (defined) read.
    if ((SR.LaneMask & UseMask).any() && SR.liveAt(BaseIndex))
      return false;
  }
  return true;
}

// Erase an identity COPY (same source and destination) produced by rewriting,
// or turn it into a KILL when it still carries liveness information that must
// be preserved. Counted in the NumIdCopies statistic.
void VirtRegRewriter::handleIdentityCopy(MachineInstr &MI) const {
  if (!MI.isIdentityCopy())
    return;
  LLVM_DEBUG(dbgs() << "Identity copy: " << MI);
  ++NumIdCopies;

  // Copies like:
  //    %r0 = COPY undef %r0
  //    %al = COPY %al, implicit-def %eax
  // give us additional liveness information: The target (super-)register
  // must not be valid before this point. Replace the COPY with a KILL
  // instruction to maintain this information.
  if (MI.getOperand(0).isUndef() || MI.getNumOperands() > 2) {
    MI.setDesc(TII->get(TargetOpcode::KILL));
    LLVM_DEBUG(dbgs() << " replace by: " << MI);
    return;
  }

  // Plain identity copy: remove it from the slot index maps (if tracked) and
  // delete it, respecting any bundle it may live in.
  if (Indexes)
    Indexes->removeSingleMachineInstrFromMaps(MI);
  MI.eraseFromBundle();
  LLVM_DEBUG(dbgs() << " deleted.\n");
}

/// The liverange splitting logic sometimes produces bundles of copies when
/// subregisters are involved. Expand these into a sequence of copy instructions
/// after processing the last in the bundle. Does not update LiveIntervals
/// which we shouldn't need for this instruction anymore.
void VirtRegRewriter::expandCopyBundle(MachineInstr &MI) const {
  if (!MI.isCopy())
    return;

  // Only act when MI is the last instruction of a bundle.
  if (MI.isBundledWithPred() && !MI.isBundledWithSucc()) {
    SmallVector<MachineInstr *, 2> MIs({&MI});

    // Only do this when the complete bundle is made out of COPYs.
    // MIs is filled back-to-front, so MIs.back() is the bundle's first copy.
    MachineBasicBlock &MBB = *MI.getParent();
    for (MachineBasicBlock::reverse_instr_iterator I =
         std::next(MI.getReverseIterator()), E = MBB.instr_rend();
         I != E && I->isBundledWithSucc(); ++I) {
      if (!I->isCopy())
        return;
      MIs.push_back(&*I);
    }
    MachineInstr *FirstMI = MIs.back();

    // True if Dst's destination register overlaps the source register of any
    // other copy in Srcs.
    auto anyRegsAlias = [](const MachineInstr *Dst,
                           ArrayRef<MachineInstr *> Srcs,
                           const TargetRegisterInfo *TRI) {
      for (const MachineInstr *Src : Srcs)
        if (Src != Dst)
          if (TRI->regsOverlap(Dst->getOperand(0).getReg(),
                               Src->getOperand(1).getReg()))
            return true;
      return false;
    };

    // If any of the destination registers in the bundle of copies alias any of
    // the source registers, try to schedule the instructions to avoid any
    // clobbering.
    // Selection-sort style pass: repeatedly move a copy whose destination
    // clobbers no remaining source to the end of the unscheduled prefix
    // MIs[0..E). If no such copy exists, the copies form a cycle.
    for (int E = MIs.size(), PrevE = E; E > 1; PrevE = E) {
      for (int I = E; I--; )
        if (!anyRegsAlias(MIs[I], makeArrayRef(MIs).take_front(E), TRI)) {
          if (I + 1 != E)
            std::swap(MIs[I], MIs[E - 1]);
          --E;
        }
      if (PrevE == E) {
        MF->getFunction().getContext().emitError(
            "register rewriting failed: cycle in copy bundle");
        break;
      }
    }

    // MIs is ordered last-to-first, so iterate it in reverse to emit the
    // scheduled order.
    MachineInstr *BundleStart = FirstMI;
    for (MachineInstr *BundledMI : llvm::reverse(MIs)) {
      // If instruction is in the middle of the bundle, move it before the
      // bundle starts, otherwise, just unbundle it. When we get to the last
      // instruction, the bundle will have been completely undone.
      if (BundledMI != BundleStart) {
        BundledMI->removeFromBundle();
        MBB.insert(FirstMI, BundledMI);
      } else if (BundledMI->isBundledWithSucc()) {
        BundledMI->unbundleFromSucc();
        BundleStart = &*std::next(BundledMI->getIterator());
      }

      // Each instruction pulled out of the bundle needs its own slot index;
      // the former bundle head (FirstMI) already has one.
      if (Indexes && BundledMI != FirstMI)
        Indexes->insertMachineInstrInMaps(*BundledMI);
    }
  }
}

/// Check whether (part of) \p SuperPhysReg is live through \p MI.
/// \pre \p MI defines a subregister of a virtual register that
/// has been assigned to \p SuperPhysReg.
bool VirtRegRewriter::subRegLiveThrough(const MachineInstr &MI,
                                        unsigned SuperPhysReg) const {
  SlotIndex MIIndex = LIS->getInstructionIndex(MI);
  SlotIndex BeforeMIUses = MIIndex.getBaseIndex();
  SlotIndex AfterMIDefs = MIIndex.getBoundaryIndex();
  for (MCRegUnitIterator Unit(SuperPhysReg, TRI); Unit.isValid(); ++Unit) {
    const LiveRange &UnitRange = LIS->getRegUnit(*Unit);
    // If the regunit is live both before and after MI,
    // we assume it is live through.
    // Generally speaking, this is not true, because something like
    // "RU = op RU" would match that description.
    // However, we know that we are trying to assess whether
    // a def of a virtual reg, vreg, is live at the same time of RU.
    // If we are in the "RU = op RU" situation, that means that vreg
    // is defined at the same time as RU (i.e., "vreg, RU = op RU").
    // Thus, vreg and RU interferes and vreg cannot be assigned to
    // SuperPhysReg. Therefore, this situation cannot happen.
    if (UnitRange.liveAt(AfterMIDefs) && UnitRange.liveAt(BeforeMIUses))
      return true;
  }
  return false;
}

// Walk every instruction in the function and replace each virtual-register
// operand with its assigned physical register, fixing sub-register indices,
// kill/dead/undef flags, copy bundles and identity copies along the way.
void VirtRegRewriter::rewrite() {
  bool NoSubRegLiveness = !MRI->subRegLivenessEnabled();
  // Physregs that must gain implicit dead/def/kill operands after the whole
  // instruction has been rewritten; drained at the bottom of the MI loop.
  SmallVector<unsigned, 8> SuperDeads;
  SmallVector<unsigned, 8> SuperDefs;
  SmallVector<unsigned, 8> SuperKills;

  for (MachineFunction::iterator MBBI = MF->begin(), MBBE = MF->end();
       MBBI != MBBE; ++MBBI) {
    LLVM_DEBUG(MBBI->print(dbgs(), Indexes));
    // instr_iterator so bundled instructions are visited individually; MII is
    // advanced before processing because MI may be erased below.
    for (MachineBasicBlock::instr_iterator
           MII = MBBI->instr_begin(), MIE = MBBI->instr_end(); MII != MIE;) {
      MachineInstr *MI = &*MII;
      ++MII;

      for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
           MOE = MI->operands_end(); MOI != MOE; ++MOI) {
        MachineOperand &MO = *MOI;

        // Make sure MRI knows about registers clobbered by regmasks.
        if (MO.isRegMask())
          MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());

        if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
          continue;
        unsigned VirtReg = MO.getReg();
        unsigned PhysReg = VRM->getPhys(VirtReg);
        assert(PhysReg != VirtRegMap::NO_PHYS_REG &&
               "Instruction uses unmapped VirtReg");
        assert(!MRI->isReserved(PhysReg) && "Reserved register assignment");

        // Preserve semantics of sub-register operands.
        unsigned SubReg = MO.getSubReg();
        if (SubReg != 0) {
          if (NoSubRegLiveness || !MRI->shouldTrackSubRegLiveness(VirtReg)) {
            // A virtual register kill refers to the whole register, so we may
            // have to add implicit killed operands for the super-register. A
            // partial redef always kills and redefines the super-register.
            if ((MO.readsReg() && (MO.isDef() || MO.isKill())) ||
                (MO.isDef() && subRegLiveThrough(*MI, PhysReg)))
              SuperKills.push_back(PhysReg);

            if (MO.isDef()) {
              // Also add implicit defs for the super-register.
              if (MO.isDead())
                SuperDeads.push_back(PhysReg);
              else
                SuperDefs.push_back(PhysReg);
            }
          } else {
            if (MO.isUse()) {
              if (readsUndefSubreg(MO))
                // We need to add an <undef> flag if the subregister is
                // completely undefined (and we are not adding super-register
                // defs).
                MO.setIsUndef(true);
            } else if (!MO.isDead()) {
              assert(MO.isDef());
            }
          }

          // The def undef and def internal flags only make sense for
          // sub-register defs, and we are substituting a full physreg. An
          // implicit killed operand from the SuperKills list will represent the
          // partial read of the super-register.
          if (MO.isDef()) {
            MO.setIsUndef(false);
            MO.setIsInternalRead(false);
          }

          // PhysReg operands cannot have subregister indexes.
          PhysReg = TRI->getSubReg(PhysReg, SubReg);
          assert(PhysReg && "Invalid SubReg for physical register");
          MO.setSubReg(0);
        }
        // Rewrite. Note we could have used MachineOperand::substPhysReg(), but
        // we need the inlining here.
        MO.setReg(PhysReg);
        MO.setIsRenamable(true);
      }

      // Add any missing super-register kills after rewriting the whole
      // instruction.
      while (!SuperKills.empty())
        MI->addRegisterKilled(SuperKills.pop_back_val(), TRI, true);

      while (!SuperDeads.empty())
        MI->addRegisterDead(SuperDeads.pop_back_val(), TRI, true);

      while (!SuperDefs.empty())
        MI->addRegisterDefined(SuperDefs.pop_back_val(), TRI);

      LLVM_DEBUG(dbgs() << "> " << *MI);

      expandCopyBundle(*MI);

      // We can remove identity copies right now.
      handleIdentityCopy(*MI);
    }
  }
}