//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
class RAFast : public MachineFunctionPass {
public:
  static char ID;
  RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
             isBulkSpilling(false) {}

private:
  MachineFunction *MF;
  MachineRegisterInfo *MRI;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  RegisterClassInfo RegClassInfo;

  // Basic block currently being allocated.
  MachineBasicBlock *MBB;

  // StackSlotForVirtReg - Maps virtual regs to the frame index where these
  // values are spilled.
  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

  // Everything we know about a live virtual register.
  struct LiveReg {
    MachineInstr *LastUse;    // Last instr to use reg.
    unsigned VirtReg;         // Virtual register number.
    unsigned PhysReg;         // Currently held here.
    unsigned short LastOpNum; // OpNum on LastUse.
    bool Dirty;               // Register needs spill.

    explicit LiveReg(unsigned v)
      : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false) {}

    unsigned getSparseSetIndex() const {
      return TargetRegisterInfo::virtReg2Index(VirtReg);
    }
  };

  typedef SparseSet<LiveReg> LiveRegMap;

  // LiveVirtRegs - This map contains entries for each virtual register
  // that is currently available in a physical register.
  LiveRegMap LiveVirtRegs;

  DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

  // RegState - Track the state of a physical register.
  enum RegState {
    // A disabled register is not available for allocation, but an alias may
    // be in use. A register can only be moved out of the disabled state if
    // all aliases are disabled.
    regDisabled,

    // A free register is not currently in use and can be allocated
    // immediately without checking aliases.
    regFree,

    // A reserved register has been assigned explicitly (e.g., setting up a
    // call parameter), and it remains reserved until it is used.
    regReserved

    // A register state may also be a virtual register number, indicating that
    // the physical register is currently allocated to a virtual register. In
    // that case, LiveVirtRegs contains the inverse mapping.
  };

  // PhysRegState - One of the RegState enums, or a virtreg.
  std::vector<unsigned> PhysRegState;

  // Set of register units.
  typedef SparseSet<unsigned> UsedInInstrSet;

  // Set of register units that are used in the current instruction, and so
  // cannot be allocated.
  UsedInInstrSet UsedInInstr;

  // Mark a physreg as used in this instruction.
  void markRegUsedInInstr(unsigned PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      UsedInInstr.insert(*Units);
  }

  // Check if a physreg or any of its aliases are used in this instruction.
  bool isRegUsedInInstr(unsigned PhysReg) const {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      if (UsedInInstr.count(*Units))
        return true;
    return false;
  }

  // SkippedInstrs - Descriptors of instructions whose clobber list was
  // ignored because all registers were spilled. It is still necessary to
  // mark all the clobbered registers as used by the function.
  SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

  // isBulkSpilling - This flag is set when LiveRegMap will be cleared
  // completely after spilling all live registers. LiveRegMap entries should
  // not be erased.
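  // See spillAll(), which sets and clears this flag around its loop over
  // LiveVirtRegs.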
  bool isBulkSpilling;

  enum : unsigned {
    spillClean = 1,
    spillDirty = 100,
    spillImpossible = ~0u
  };
public:
  const char *getPassName() const override {
    return "Fast Register Allocator";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::AllVRegsAllocated);
  }

private:
  bool runOnMachineFunction(MachineFunction &Fn) override;
  void AllocateBasicBlock();
  void handleThroughOperands(MachineInstr *MI,
                             SmallVectorImpl<unsigned> &VirtDead);
  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
  bool isLastUseOfLocalReg(MachineOperand&);

  void addKillFlag(const LiveReg&);
  void killVirtReg(LiveRegMap::iterator);
  void killVirtReg(unsigned VirtReg);
  void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

  void usePhysReg(MachineOperand&);
  void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
  unsigned calcSpillCost(unsigned PhysReg) const;
  void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }
  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }
  LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
  LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
                                    unsigned Hint);
  LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  void spillAll(MachineBasicBlock::iterator MI);
  bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
};
char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
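  // (The first non-debug reg operand must be MO itself, and advancing past it
  // must reach the end of the use/def list.)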
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
      LiveDbgValueMap[LRI->VirtReg];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      const MDNode *Var = DBG->getDebugVariable();
      const MDNode *Expr = DBG->getDebugExpression();
      bool IsIndirect = DBG->isIndirectDebugValue();
      uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
      DebugLoc DL = DBG->getDebugLoc();
      assert(cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
             "Expected inlined-at fields to agree");
      MachineInstr *NewDV =
          BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
              .addFrameIndex(FI)
              .addImm(Offset)
              .addMetadata(Var)
              .addMetadata(Expr);
      assert(NewDV->getParent() == MBB && "dangling parent pointer");
      (void)NewDV;
      DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
    }
    // Now that this register is spilled, there should not be any DBG_VALUE
    // pointing to this register; they all point to the spilled value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  // Ignore undef uses.
  if (MO.isUndef())
    return;

  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      // Either PhysReg is a subregister of Alias and we mark the
      // whole register as free, or PhysReg is the superregister of
      // Alias and we mark all the aliases as disabled before freeing
      // PhysReg.
      // In the latter case, since PhysReg was disabled, this means that
      // its value is defined only by physical sub-registers. This check
      // is performed by the assert of the default case in this loop.
      // Note: The value of the superregister may only be partially
      // defined, that is why regDisabled is a valid state for aliases.
      assert((TRI->isSuperRegister(PhysReg, Alias) ||
              TRI->isSuperRegister(Alias, PhysReg)) &&
             "Instruction is not using a subregister of a reserved register");
      // Fall through.
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        PhysRegState[Alias] = regFree;
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling/clearing out PhysReg and its
// aliases so that it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
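/// Both sides of the mapping are updated: PhysRegState[PhysReg] is set to the
/// virtreg, and LR.PhysReg is set to PhysReg.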
///
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}

RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << TRI->getRegClassName(RC) << "\n");

  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, *I);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI->isInlineAsm())
    MI->emitError("inline assembly requires more registers than available");
  else
    MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
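/// The dirty flag means the value currently lives only in the physreg; it is
/// written back to its stack slot the next time this virtreg is spilled.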
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, taking
// subregs into account. This may invalidate any operand pointers.
// Return true if the operand kills its register.
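// For a subregister operand, the operand is rewritten to the corresponding
// sub-physreg and the subreg index is cleared, so later passes only see plain
// physical registers.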
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI->addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handle special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (const auto &LI : MBB->liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch(PhysRegState[Reg]) {
        case regFree:
          break;
        case regReserved:
          dbgs() << "*";
          break;
        default: {
          dbgs() << '=' << PrintReg(PhysRegState[Reg]);
          LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
          assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
          if (I->Dirty)
            dbgs() << "*";
          assert(I->PhysReg == Reg && "Bad inverse map");
          break;
        }
        }
      }
      dbgs() << '\n';
      // Check that LiveVirtRegs is the inverse.
      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
               "Bad map key");
        assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
               "Bad map value");
        assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
      }
    });

    // Debug values are not allowed to change codegen in any way.
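    // If the vreg is live in a physreg, rewrite the DBG_VALUE operand to use
    // it; if its value only exists in a spill slot, rewrite the DBG_VALUE to
    // refer to the frame index instead.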
    if (MI->isDebugValue()) {
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *Var = MI->getDebugVariable();
              const MDNode *Expr = MI->getDebugExpression();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              assert(
                  cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
                  "Expected inlined-at fields to agree");
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                                        .addFrameIndex(SS)
                                        .addImm(Offset)
                                        .addMetadata(Var)
                                        .addMetadata(Expr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
              MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be
    // allocated the same register at use-time and def-time: early clobbers
    // and tied operands.
    // If there are also physical defs, these registers must avoid both
    // physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves one purpose: If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots.
      // Note: although it is appealing to just consider all definitions
      // as call-clobbered, this is not correct because some of those
      // definitions may be used later on and we do not want to reuse
      // those for virtual registers in between.
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function.
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // Initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers.
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register
  // references.
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}