//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "RegisterClassInfo.h"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {
      initializePHIEliminationPass(*PassRegistry::getPassRegistry());
      initializeTwoAddressInstructionPassPass(*PassRegistry::getPassRegistry());
    }
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    enum {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
    void addRetOperands(MachineBasicBlock *MBB);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(LRI->second);
  const LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");
  PhysRegState[LR.PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->first, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    SmallVector<MachineInstr *, 4> &LRIDbgValues = LiveDbgValueMap[LRI->first];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      const MDNode *MDPtr =
        DBG->getOperand(DBG->getNumOperands()-1).getMetadata();
      int64_t Offset = 0;
      if (DBG->getOperand(1).isImm())
        Offset = DBG->getOperand(1).getImm();
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      } else
        DL = MI->getDebugLoc();
      if (MachineInstr *NewDV =
          TII->emitFrameIndexDebugValue(*MF, FI, Offset, MDPtr, DL)) {
        MachineBasicBlock *MBB = DBG->getParent();
        MBB->insert(MI, NewDV);
        DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
      }
    }
    // Now that this register has been spilled, there should no longer be any
    // DBG_VALUE pointing to it; they all point to the spilled value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling and clearing out PhysReg and its
// aliases so the register is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (UsedInInstr.test(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default:
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up the cost of its aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now.
/// The physical register must not be used for anything else when this is
/// called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LRE.first, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !RegClassInfo.isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      return assignVirtToPhysReg(LRE, Hint);
    }
  }

  ArrayRef<unsigned> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<unsigned>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << RC->getName() << "\n");

  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<unsigned>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  assignVirtToPhysReg(LRE, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LR.LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LR.PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, taking subregs
// into account. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

// Handle special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    if (ThroughRegs.count(PhysRegState[Reg]))
      definePhysReg(MI, Reg, regFree);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) {
      UsedInInstr.set(*AS);
      if (ThroughRegs.count(PhysRegState[*AS]))
        definePhysReg(MI, *AS, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "(" << MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->second.PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->second.PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.reset();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    UsedInInstr.set(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    UsedInInstr.set(PartialDefs[i]);
}

/// addRetOperands - ensure that a return instruction has an operand for each
/// value live out of the function.
///
/// Things marked both call and return are tail calls; do not do this for them.
/// The tail callee need not take the same registers as input that it produces
/// as output, and there are dependencies for its input registers elsewhere.
///
/// FIXME: This should be done as part of instruction selection, and this
/// helper should be deleted. Until then, we use custom logic here to create
/// the proper operand under all circumstances. We can't use addRegisterKilled
/// because that doesn't make sense for undefined values. We can't simply avoid
/// calling it for undefined values, because we must ensure that the operand
/// always exists.
void RAFast::addRetOperands(MachineBasicBlock *MBB) {
  if (MBB->empty() || !MBB->back().isReturn() || MBB->back().isCall())
    return;

  MachineInstr *MI = &MBB->back();

  for (MachineRegisterInfo::liveout_iterator
         I = MBB->getParent()->getRegInfo().liveout_begin(),
         E = MBB->getParent()->getRegInfo().liveout_end(); I != E; ++I) {
    unsigned Reg = *I;
    assert(TargetRegisterInfo::isPhysicalRegister(Reg) &&
           "Cannot have a live-out virtual register.");

    bool hasDef = PhysRegState[Reg] == regReserved;

    // Check if this register already has an operand.
    bool Found = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isUse())
        continue;

      unsigned OperReg = MO.getReg();
      if (!TargetRegisterInfo::isPhysicalRegister(OperReg))
        continue;

      if (OperReg == Reg || TRI->isSuperRegister(OperReg, Reg)) {
        // If the ret already has an operand for this physreg or a superset,
        // don't duplicate it. Set the kill flag if the value is defined.
        if (hasDef && !MO.isKill())
          MO.setIsKill();
        Found = true;
        break;
      }
    }
    if (!Found)
      MI->addOperand(MachineOperand::CreateReg(Reg,
                                               false /*IsDef*/,
                                               true  /*IsImp*/,
                                               hasDef/*IsKill*/));
  }
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    if (RegClassInfo.isAllocatable(*I))
      definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Now sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
        dbgs() << "\n>> " << *MI << "Regs:";
        for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
          if (PhysRegState[Reg] == regDisabled) continue;
          dbgs() << " " << TRI->getName(Reg);
          switch(PhysRegState[Reg]) {
          case regFree:
            break;
          case regReserved:
            dbgs() << "*";
            break;
          default:
            dbgs() << '=' << PrintReg(PhysRegState[Reg]);
            if (LiveVirtRegs[PhysRegState[Reg]].Dirty)
              dbgs() << "*";
            assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg &&
                   "Bad inverse map");
            break;
          }
        }
        dbgs() << '\n';
        // Check that LiveVirtRegs is the inverse.
        for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
             e = LiveVirtRegs.end(); i != e; ++i) {
          assert(TargetRegisterInfo::isVirtualRegister(i->first) &&
                 "Bad map key");
          assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) &&
                 "Bad map value");
          assert(PhysRegState[i->second.PhysReg] == i->first &&
                 "Bad inverse map");
        }
      });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->second.PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            } else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              int64_t Offset = MI->getOperand(1).getImm();
              const MDNode *MDPtr =
                MI->getOperand(MI->getNumOperands()-1).getMetadata();
              DebugLoc DL = MI->getDebugLoc();
              if (MachineInstr *NewDV =
                  TII->emitFrameIndexDebugValue(*MF, SS, Offset, MDPtr, DL)) {
                DEBUG(dbgs() << "Modifying debug info due to spill:" <<
                      "\t" << *MI);
                MachineBasicBlock *MBB = MI->getParent();
                MBB->insert(MBB->erase(MI), NewDV);
                // Scan NewDV operands from the beginning.
                MI = NewDV;
                ScanDbgValue = true;
                break;
              } else {
                // We can't allocate a physreg for a DebugValue; sorry!
                DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
                MO.setReg(0);
              }
            }
          }
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.reset();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                       MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!RegClassInfo.isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be
    // allocated the same register at use-time and def-time: early clobbers and
    // tied operands. If there are also physical defs, these registers must
    // avoid both physical defs and uses, making them more constrained than
    // normal operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->second.PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    MRI->addPhysRegsUsed(UsedInInstr);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.reset();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        UsedInInstr.set(Reg);
        for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
          UsedInInstr.set(*AS);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << " Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!RegClassInfo.isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    MRI->addPhysRegsUsed(UsedInInstr);

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  // addRetOperands must run after we've seen all defs in this block.
  addRetOperands(MBB);

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function.
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.resize(TRI->getNumRegs());

  // Initialize the virtual register -> stack slot map to have a 'null' (-1)
  // mapping for all virtual registers.
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register
  // references.
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Make sure the set of used physregs is closed under subreg operations.
  MRI->closePhysRegsUsed(*TRI);

  // Add the clobber lists for all the instructions we skipped earlier.
  for (SmallPtrSet<const MCInstrDesc*, 4>::const_iterator
       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const unsigned *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}
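
// Usage note (illustrative, not part of the allocator's logic): thanks to the
// RegisterRegAlloc registration near the top of this file, this pass can be
// requested explicitly, e.g. with "llc -regalloc=fast", and it is the
// allocator typically used at -O0. It can also be created directly in a pass
// pipeline via llvm::createFastRegisterAllocator().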