//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
class RAFast : public MachineFunctionPass {
public:
  static char ID;
  RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
             isBulkSpilling(false) {}

private:
  MachineFunction *MF;
  MachineRegisterInfo *MRI;
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  RegisterClassInfo RegClassInfo;

  // Basic block currently being allocated.
  MachineBasicBlock *MBB;

  // StackSlotForVirtReg - Maps virtual regs to the frame index where these
  // values are spilled.
  IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

  // Everything we know about a live virtual register.
  struct LiveReg {
    MachineInstr *LastUse;    // Last instr to use reg.
    unsigned VirtReg;         // Virtual register number.
    unsigned PhysReg;         // Currently held here.
    unsigned short LastOpNum; // OpNum on LastUse.
    bool Dirty;               // Register needs spill.

    explicit LiveReg(unsigned v)
      : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false) {}

    unsigned getSparseSetIndex() const {
      return TargetRegisterInfo::virtReg2Index(VirtReg);
    }
  };

  typedef SparseSet<LiveReg> LiveRegMap;

  // LiveVirtRegs - This map contains entries for each virtual register
  // that is currently available in a physical register.
  LiveRegMap LiveVirtRegs;

  DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

  // RegState - Track the state of a physical register.
  enum RegState {
    // A disabled register is not available for allocation, but an alias may
    // be in use. A register can only be moved out of the disabled state if
    // all aliases are disabled.
    regDisabled,

    // A free register is not currently in use and can be allocated
    // immediately without checking aliases.
    regFree,

    // A reserved register has been assigned explicitly (e.g., setting up a
    // call parameter), and it remains reserved until it is used.
    regReserved

    // A register state may also be a virtual register number, indicating that
    // the physical register is currently allocated to a virtual register. In
    // that case, LiveVirtRegs contains the inverse mapping.
  };

  // PhysRegState - One of the RegState enums, or a virtreg.
  std::vector<unsigned> PhysRegState;

  // Set of register units.
  typedef SparseSet<unsigned> UsedInInstrSet;

  // Set of register units that are used in the current instruction, and so
  // cannot be allocated.
  UsedInInstrSet UsedInInstr;

  // Mark a physreg as used in this instruction.
  void markRegUsedInInstr(unsigned PhysReg) {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      UsedInInstr.insert(*Units);
  }

  // Check if a physreg or any of its aliases are used in this instruction.
  bool isRegUsedInInstr(unsigned PhysReg) const {
    for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
      if (UsedInInstr.count(*Units))
        return true;
    return false;
  }

  // SkippedInstrs - Descriptors of instructions whose clobber list was
  // ignored because all registers were spilled. It is still necessary to
  // mark all the clobbered registers as used by the function.
  SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

  // isBulkSpilling - This flag is set when LiveRegMap will be cleared
  // completely after spilling all live registers. LiveRegMap entries should
  // not be erased.
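  // (spillAll() relies on this: it iterates over LiveVirtRegs while spilling
  // and clears the whole map afterwards, so erasing individual entries during
  // the walk would invalidate its iterators.)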
  bool isBulkSpilling;

  enum : unsigned {
    spillClean = 1,
    spillDirty = 100,
    spillImpossible = ~0u
  };

public:
  StringRef getPassName() const override { return "Fast Register Allocator"; }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoPHIs);
  }

  MachineFunctionProperties getSetProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  bool runOnMachineFunction(MachineFunction &Fn) override;
  void AllocateBasicBlock();
  void handleThroughOperands(MachineInstr *MI,
                             SmallVectorImpl<unsigned> &VirtDead);
  int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
  bool isLastUseOfLocalReg(MachineOperand&);

  void addKillFlag(const LiveReg&);
  void killVirtReg(LiveRegMap::iterator);
  void killVirtReg(unsigned VirtReg);
  void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
  void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

  void usePhysReg(MachineOperand&);
  void definePhysReg(MachineInstr &MI, unsigned PhysReg, RegState NewState);
  unsigned calcSpillCost(unsigned PhysReg) const;
  void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
  LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }
  LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
    return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
  }
  LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
  LiveRegMap::iterator allocVirtReg(MachineInstr &MI, LiveRegMap::iterator,
                                    unsigned Hint);
  LiveRegMap::iterator defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  LiveRegMap::iterator reloadVirtReg(MachineInstr &MI, unsigned OpNum,
                                     unsigned VirtReg, unsigned Hint);
  void spillAll(MachineBasicBlock::iterator MI);
  bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
};
char RAFast::ID = 0;
}

INITIALIZE_PASS(RAFast, "regallocfast", "Fast Register Allocator", false, false)

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  unsigned Size = TRI->getSpillSize(*RC);
  unsigned Align = TRI->getSpillAlignment(*RC);
  int FrameIdx = MF->getFrameInfo().CreateSpillStackObject(Size, Align);

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    // else, don't do anything: we are probably redefining a
    // subreg of this register and given we don't track which
    // lanes are actually dead, we cannot insert a kill flag here.
    // Otherwise we may end up in a situation like this:
    // ... = (MO) physreg:sub1, physreg <implicit-use, kill>
    // ... <== Here we would allow a later pass to reuse physreg:sub1
    //         which is potentially wrong.
    // LR:sub0 = ...
    // ... = LR.sub1 <== This is going to use physreg:sub1
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
      LiveDbgValueMap[LRI->VirtReg];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      MachineInstr *NewDV = buildDbgValueForSpill(*MBB, MI, *DBG, FI);
      assert(NewDV->getParent() == MBB && "dangling parent pointer");
      (void)NewDV;
      DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
    }
    // Now that this register is spilled, there should not be any DBG_VALUE
    // pointing to this register because they are all pointing to the spilled
    // value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  // Ignore undef uses.
  if (MO.isUndef())
    return;

  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    LLVM_FALLTHROUGH;
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      // Either PhysReg is a subregister of Alias and we mark the
      // whole register as free, or PhysReg is the superregister of
      // Alias and we mark all the aliases as disabled before freeing
      // PhysReg.
      // In the latter case, since PhysReg was disabled, this means that
      // its value is defined only by physical sub-registers. This check
      // is performed by the assert of the default case in this loop.
      // Note: The value of the superregister may only be partially
      // defined, that is why regDisabled is a valid state for aliases.
      assert((TRI->isSuperRegister(PhysReg, Alias) ||
              TRI->isSuperRegister(Alias, PhysReg)) &&
             "Instruction is not using a subregister of a reserved register");
      LLVM_FALLTHROUGH;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        PhysRegState[Alias] = regFree;
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr &MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    LLVM_FALLTHROUGH;
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      LLVM_FALLTHROUGH;
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}

// calcSpillCost - Return the cost of spilling/clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}

/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}

RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr &MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << TRI->getRegClassName(RC) << "\n");

  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, *I);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI.isInlineAsm())
    MI.emitError("inline assembly requires more registers than available");
  else
    MI.emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
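/// Unlike reloadVirtReg, this does not insert a load from the stack slot;
/// the previous value, if any, is simply overwritten by the new definition.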
RAFast::LiveRegMap::iterator RAFast::defineVirtReg(MachineInstr &MI,
                                                   unsigned OpNum,
                                                   unsigned VirtReg,
                                                   unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator RAFast::reloadVirtReg(MachineInstr &MI,
                                                   unsigned OpNum,
                                                   unsigned VirtReg,
                                                   unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI.getOperand(OpNum);
  if (New) {
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
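// Callers use the return value to know when the corresponding virtual
// register mapping can be killed (immediately for uses, after the def scan
// for dead defs).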
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI->addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handle special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(*MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "(" << MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(*MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(*MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(*MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (const auto &LI : MBB->liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(*MII, LI.PhysReg, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = &*MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
      dbgs() << "\n>> " << *MI << "Regs:";
      for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
        if (PhysRegState[Reg] == regDisabled) continue;
        dbgs() << " " << TRI->getName(Reg);
        switch (PhysRegState[Reg]) {
        case regFree:
          break;
        case regReserved:
          dbgs() << "*";
          break;
        default: {
          dbgs() << '=' << PrintReg(PhysRegState[Reg]);
          LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
          assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
          if (I->Dirty)
            dbgs() << "*";
          assert(I->PhysReg == Reg && "Bad inverse map");
          break;
        }
        }
      }
      dbgs() << '\n';
      // Check that LiveVirtRegs is the inverse.
      for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
           e = LiveVirtRegs.end(); i != e; ++i) {
        assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
               "Bad map key");
        assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
               "Bad map value");
        assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
      }
    });

    // Debug values are not allowed to change codegen in any way.
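    // They are rewritten to use the physreg currently holding the value,
    // turned into frame-index DBG_VALUEs if the value only lives in its spill
    // slot, or have their register operand cleared when nothing is known.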
    if (MI->isDebugValue()) {
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *Var = MI->getDebugVariable();
              const MDNode *Expr = MI->getDebugExpression();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              assert(
                  cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
                  "Expected inlined-at fields to agree");
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                                        .addFrameIndex(SS)
                                        .addImm(Offset)
                                        .addMetadata(Var)
                                        .addMetadata(Expr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                       MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(*MI, Reg,
                      (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(*MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves one purpose: If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots.
      // Note: although it is appealing to just consider all definitions
      // as call-clobbered, this is not correct because some of those
      // definitions may be used later on and we do not want to reuse
      // those for virtual registers in between.
      DEBUG(dbgs() << " Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(*MI, Reg, MO.isDead() ? regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(*MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ?
                      PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // Initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers.
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references.
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}
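// Note: the RegisterRegAlloc entry near the top of this file registers the
// allocator under the name "fast", so it can typically be requested
// explicitly from tools that expose the -regalloc option, e.g.:
//
//   llc -regalloc=fast input.ll -o input.s
//
// It is also the allocator normally used when optimization is disabled (-O0).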