//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the -regalloc=fast command line option.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned VirtReg;         // Virtual register number.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      explicit LiveReg(unsigned v)
        : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false){}

      unsigned getSparseSetIndex() const {
        return TargetRegisterInfo::virtReg2Index(VirtReg);
      }
    };

    typedef SparseSet<LiveReg> LiveRegMap;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // LiveDbgValueMap - DBG_VALUE instructions referencing each virtual
    // register, kept so their operands can be rewritten when the register is
    // spilled (see spillVirtReg).
    DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // Set of register units.
    typedef SparseSet<unsigned> UsedInInstrSet;

    // Set of register units that are used in the current instruction, and so
    // cannot be allocated.
    UsedInInstrSet UsedInInstr;

    // Mark a physreg as used in this instruction.
    void markRegUsedInInstr(unsigned PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    // Check if a physreg or any of its aliases are used in this instruction.
    bool isRegUsedInInstr(unsigned PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative spill costs: a clean register only needs its mapping dropped,
    // a dirty register requires an actual store to its stack slot.
    enum : unsigned {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    const char *getPassName() const override {
      return "Fast Register Allocator";
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn) override;
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
    LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
    LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
                                      unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineBasicBlock::iterator MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    // If the operand already names the assigned physreg the flag can go
    // directly on it; otherwise record an implicit kill of the physreg.
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  // Flag the last use before freeing the physreg.
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk. During bulk
  // spilling the caller (spillAll) is iterating over the map and will clear
  // it wholesale afterwards, so entries must not be erased here.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  // A virtreg that is not currently mapped to a physreg needs no work.
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling. Emits a store before MI if
/// the register is dirty, rewrites any DBG_VALUEs that referenced the
/// register to point at the stack slot, then kills the virtreg mapping.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
      LiveDbgValueMap[LRI->VirtReg];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      const MDNode *Var = DBG->getDebugVariable();
      const MDNode *Expr = DBG->getDebugExpression();
      bool IsIndirect = DBG->isIndirectDebugValue();
      uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      } else
        DL = MI->getDebugLoc();
      // Build a frame-index DBG_VALUE describing the spilled location.
      MachineInstr *NewDV =
          BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
              .addFrameIndex(FI)
              .addImm(Offset)
              .addMetadata(Var)
              .addMetadata(Expr);
      assert(NewDV->getParent() == MBB && "dangling parent pointer");
      (void)NewDV;
      DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
    }
    // Now that this register is spilled there should not be any DBG_VALUE
    // pointing to this register because they are all pointing to the spilled
    // value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  // Signal killVirtReg not to erase individual entries while we iterate.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");
  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  // PhysRegState holds either a RegState enum value or a virtreg number; the
  // default case below therefore means "allocated to VirtReg".
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // A superregister covers all remaining aliases, so we can stop early.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    // Allocated to a virtreg: cost depends on whether it must be stored back.
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  // Record both directions of the virt<->phys mapping.
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}

/// assignVirtToPhysReg - Re-look-up the LiveReg entry for VirtReg and bind it
/// to PhysReg. Used by callers whose iterators may have been invalidated by
/// an intervening spill.
RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << RC->getName() << "\n");

  // No free register: evict the cheapest candidate in allocation order.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, *I);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI->isInlineAsm())
    MI->emitError("inline assembly requires more registers than available");
  else
    MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
587 RAFast::LiveRegMap::iterator 588 RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum, 589 unsigned VirtReg, unsigned Hint) { 590 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 591 "Not a virtual register"); 592 LiveRegMap::iterator LRI; 593 bool New; 594 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); 595 if (New) { 596 // If there is no hint, peek at the only use of this register. 597 if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) && 598 MRI->hasOneNonDBGUse(VirtReg)) { 599 const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg); 600 // It's a copy, use the destination register as a hint. 601 if (UseMI.isCopyLike()) 602 Hint = UseMI.getOperand(0).getReg(); 603 } 604 LRI = allocVirtReg(MI, LRI, Hint); 605 } else if (LRI->LastUse) { 606 // Redefining a live register - kill at the last use, unless it is this 607 // instruction defining VirtReg multiple times. 608 if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse()) 609 addKillFlag(*LRI); 610 } 611 assert(LRI->PhysReg && "Register not assigned"); 612 LRI->LastUse = MI; 613 LRI->LastOpNum = OpNum; 614 LRI->Dirty = true; 615 markRegUsedInInstr(LRI->PhysReg); 616 return LRI; 617 } 618 619 /// reloadVirtReg - Make sure VirtReg is available in a physreg and return it. 
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not currently live: allocate a physreg and reload the value from its
    // stack slot before MI.
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    // The register holds an unsaved value; adjust kill/dead flags so a later
    // spill isn't skipped or a value killed while still needed.
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || Dead;
  }

  // Handle subregister index: rewrite to the concrete subregister and drop
  // the subreg index from the operand.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI->addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  // "Through" registers are virtregs that must be live both into and out of
  // MI: early clobbers, tied operands, and partial redefines that read the
  // register.
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  // Mark each allocatable block live-in register as reserved for the whole
  // block so ordinary allocation never hands it out.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
       E = MBB->livein_end(); I != E; ++I)
    if (MRI->isAllocatable(*I))
      definePhysReg(MII, *I, regReserved);

  // Virtual registers that die in the current instruction (killed after the
  // def scan) and COPY instructions queued for deletion once the block is
  // fully allocated.
  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    // Advance MII now: the current instruction may be erased/replaced below
    // (DBG_VALUE rewriting), so MI must not alias the loop cursor.
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
      // Dump the physreg state table and verify that LiveVirtRegs is its
      // exact inverse. Debug builds only; no effect on codegen.
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch(PhysRegState[Reg]) {
        case regFree:
          break;
        case regReserved:
          dbgs() << "*";
          break;
        default: {
          // Any other state is the virtual register currently mapped here.
          dbgs() << '=' << PrintReg(PhysRegState[Reg]);
          LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
          assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
          if (I->Dirty)
            dbgs() << "*";
          assert(I->PhysReg == Reg && "Bad inverse map");
          break;
        }
        }
      }
      dbgs() << '\n';
      // Check that LiveVirtRegs is the inverse.
      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
               "Bad map key");
        assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
               "Bad map value");
        assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
      }
    });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      // Rewriting a DBG_VALUE may create a new instruction; restart the
      // operand scan from the beginning whenever that happens.
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            // Value is live in a physreg; just substitute it.
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              // NOTE(review): this debug message has no trailing newline.
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              // Replace the register operand with a frame index by building
              // a fresh DBG_VALUE and erasing the old one.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *Var = MI->getDebugVariable();
              const MDNode *Expr = MI->getDebugExpression();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                  .addFrameIndex(SS)
                  .addImm(Offset)
                  .addMetadata(Var)
                  .addMetadata(Expr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          // Remember this DBG_VALUE so it can be updated if Reg is later
          // spilled or reloaded.
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        // Only record constraints here; virtregs are allocated in the
        // second (uses) and third (defs) scans below.
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                              MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          // A subreg def that also reads the register is a partial redef.
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        // Early-clobber physreg defs are claimed now so virtreg uses can't
        // be assigned to them; dead/implicit defs don't stay reserved.
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        // CopyDst is passed as an allocation hint for the reload.
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        // Keep the coalescing candidate only while source still matches.
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        // setPhysReg returns true when this was the last use (kill).
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    // Record every register unit touched by the use scan.
    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Early clobbers were already handled in the first scan.
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      // CopySrc is passed as an allocation hint for the def.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        // Dead def: remember it, but don't kill it until all defs are done.
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing;
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    // Record every register unit touched by the def scan.
    for (UsedInInstrSet::iterator
         I = UsedInInstr.begin(), E = UsedInInstr.end(); I != E; ++I)
      MRI->setRegUnitUsed(*I);

    // A copy whose source and destination ended up in the same register (and
    // same subregister indices) is now a no-op; queue it for deletion.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  // Write every dirty virtreg back to its stack slot before leaving the block;
  // this allocator keeps no values in registers across block boundaries.
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
/// Caches the target/register/instruction info, resets the per-function maps,
/// allocates each basic block independently, records implicit defs of skipped
/// call instructions, and finally removes all virtual registers from MRI.
/// Always returns true: the function is modified.
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  // Cache function-wide objects used throughout the pass.
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getSubtargetImpl()->getRegisterInfo();
  TII = TM->getSubtargetImpl()->getInstrInfo();
  // Reserved registers must not change once allocation starts.
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  // UsedInInstr tracks register units, not registers; size it accordingly.
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Add the clobber lists for all the instructions we skipped earlier.
  // (Calls whose <imp-def> operands were not walked during allocation.)
  for (const MCInstrDesc *Desc : SkippedInstrs)
    if (const uint16_t *Defs = Desc->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  // Release per-function state so the pass can be rerun on another function.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// Factory function used by the RegisterRegAlloc registry ("-regalloc=fast")
/// and by targets that request the fast allocator directly.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}