//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned VirtReg;         // Virtual register number.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      explicit LiveReg(unsigned v)
        : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false){}

      unsigned getSparseSetIndex() const {
        return TargetRegisterInfo::virtReg2Index(VirtReg);
      }
    };

    typedef SparseSet<LiveReg> LiveRegMap;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating that
      // the physical register is currently allocated to a virtual register. In
      // that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // Set of register units.
    typedef SparseSet<unsigned> UsedInInstrSet;

    // Set of register units that are used in the current instruction, and so
    // cannot be allocated.
    UsedInInstrSet UsedInInstr;

    // Mark a physreg as used in this instruction.
    void markRegUsedInInstr(unsigned PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    // Check if a physreg or any of its aliases are used in this instruction.
    bool isRegUsedInInstr(unsigned PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    enum : unsigned {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    const char *getPassName() const override {
      return "Fast Register Allocator";
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn) override;
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
    LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
    LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
                                      unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineBasicBlock::iterator MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// addKillFlag - Set kill flags on last use of a virtual register.
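/// If the last-use operand was rewritten to a different physical register, the
/// kill is recorded on the physreg operand instead, adding an implicit operand
/// to the instruction when necessary.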
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
      LiveDbgValueMap[LRI->VirtReg];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      const MDNode *MDPtr = DBG->getOperand(2).getMetadata();
      bool IsIndirect = DBG->isIndirectDebugValue();
      uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      } else
        DL = MI->getDebugLoc();
      MachineBasicBlock *MBB = DBG->getParent();
      MachineInstr *NewDV =
          BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
              .addFrameIndex(FI).addImm(Offset).addMetadata(MDPtr);
      (void)NewDV;
      DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
    }
    // Now that this register is spilled, there should not be any DBG_VALUE
    // pointing to this register because they all point to the spilled value
    // now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");
  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
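/// Any virtual register currently held by PhysReg or one of its aliases is
/// spilled first; when PhysReg was disabled, its aliases are switched to the
/// disabled state so they cannot be handed out while PhysReg is in use.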
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling/clearing out PhysReg and its
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}

RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
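/// The hint is taken first when it can be had without spilling a dirty
/// register; otherwise the allocation order is scanned for a completely free
/// register, and failing that the candidate with the lowest spill cost is
/// evicted.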
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << RC->getName() << "\n");

  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, *I);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI->isInlineAsm())
    MI->emitError("inline assembly requires more registers than available");
  else
    MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI->addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handle special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
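  // Only physreg uses and early clobber defs stay marked below; the partial
  // defs collected above are re-marked afterwards so the normal use scan does
  // not reallocate them.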
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    if (MRI->isAllocatable(*I))
      definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
        dbgs() << "\n>> " << *MI << "Regs:";
        for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
          if (PhysRegState[Reg] == regDisabled) continue;
          dbgs() << " " << TRI->getName(Reg);
          switch(PhysRegState[Reg]) {
          case regFree:
            break;
          case regReserved:
            dbgs() << "*";
            break;
          default: {
            dbgs() << '=' << PrintReg(PhysRegState[Reg]);
            LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
            assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
            if (I->Dirty)
              dbgs() << "*";
            assert(I->PhysReg == Reg && "Bad inverse map");
            break;
          }
          }
        }
        dbgs() << '\n';
        // Check that LiveVirtRegs is the inverse.
        for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
             e = LiveVirtRegs.end(); i != e; ++i) {
          assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
                 "Bad map key");
          assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
                 "Bad map value");
          assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
        }
      });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *MDPtr =
                MI->getOperand(MI->getNumOperands()-1).getMetadata();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                  .addFrameIndex(SS).addImm(Offset).addMetadata(MDPtr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                       MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
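    // Each use is reloaded into (or found already live in) a physreg and the
    // operand is rewritten; a use that turns out to be a kill releases the
    // register immediately.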
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
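  // This allocator works one basic block at a time, so any value still live in
  // a register must be written back to its stack slot before the terminator;
  // successor blocks will reload it from there.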
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getSubtargetImpl()->getRegisterInfo();
  TII = TM->getSubtargetImpl()->getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Add the clobber lists for all the instructions we skipped earlier.
  for (const MCInstrDesc *Desc : SkippedInstrs)
    if (const uint16_t *Defs = Desc->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}