//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "RegisterClassInfo.h"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the name "fast" so it can be selected with
// -regalloc=fast.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    // Context for the function currently being allocated; set up by
    // runOnMachineFunction (defined elsewhere in this file).
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled. -1 means no stack slot has been assigned yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // LiveDbgValueMap - DBG_VALUE instructions seen for each virtual
    // register, kept so they can be rewritten to point at the stack slot
    // when the register is spilled (see spillVirtReg).
    DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Cost units used by calcSpillCost().
    enum {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
    void addRetOperands(MachineBasicBlock *MBB);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack. Returns the existing frame index if one was
/// already assigned, otherwise creates a new spill slot sized for RC.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
208 void RAFast::addKillFlag(const LiveReg &LR) { 209 if (!LR.LastUse) return; 210 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 211 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { 212 if (MO.getReg() == LR.PhysReg) 213 MO.setIsKill(); 214 else 215 LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true); 216 } 217 } 218 219 /// killVirtReg - Mark virtreg as no longer available. 220 void RAFast::killVirtReg(LiveRegMap::iterator LRI) { 221 addKillFlag(LRI->second); 222 const LiveReg &LR = LRI->second; 223 assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping"); 224 PhysRegState[LR.PhysReg] = regFree; 225 // Erase from LiveVirtRegs unless we're spilling in bulk. 226 if (!isBulkSpilling) 227 LiveVirtRegs.erase(LRI); 228 } 229 230 /// killVirtReg - Mark virtreg as no longer available. 231 void RAFast::killVirtReg(unsigned VirtReg) { 232 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 233 "killVirtReg needs a virtual register"); 234 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 235 if (LRI != LiveVirtRegs.end()) 236 killVirtReg(LRI); 237 } 238 239 /// spillVirtReg - This method spills the value specified by VirtReg into the 240 /// corresponding stack slot if needed. 241 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { 242 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 243 "Spilling a physical register is illegal!"); 244 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 245 assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); 246 spillVirtReg(MI, LRI); 247 } 248 249 /// spillVirtReg - Do the actual work of spilling. 
250 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, 251 LiveRegMap::iterator LRI) { 252 LiveReg &LR = LRI->second; 253 assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping"); 254 255 if (LR.Dirty) { 256 // If this physreg is used by the instruction, we want to kill it on the 257 // instruction, not on the spill. 258 bool SpillKill = LR.LastUse != MI; 259 LR.Dirty = false; 260 DEBUG(dbgs() << "Spilling " << PrintReg(LRI->first, TRI) 261 << " in " << PrintReg(LR.PhysReg, TRI)); 262 const TargetRegisterClass *RC = MRI->getRegClass(LRI->first); 263 int FI = getStackSpaceFor(LRI->first, RC); 264 DEBUG(dbgs() << " to stack slot #" << FI << "\n"); 265 TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI); 266 ++NumStores; // Update statistics 267 268 // If this register is used by DBG_VALUE then insert new DBG_VALUE to 269 // identify spilled location as the place to find corresponding variable's 270 // value. 271 SmallVector<MachineInstr *, 4> &LRIDbgValues = LiveDbgValueMap[LRI->first]; 272 for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) { 273 MachineInstr *DBG = LRIDbgValues[li]; 274 const MDNode *MDPtr = 275 DBG->getOperand(DBG->getNumOperands()-1).getMetadata(); 276 int64_t Offset = 0; 277 if (DBG->getOperand(1).isImm()) 278 Offset = DBG->getOperand(1).getImm(); 279 DebugLoc DL; 280 if (MI == MBB->end()) { 281 // If MI is at basic block end then use last instruction's location. 282 MachineBasicBlock::iterator EI = MI; 283 DL = (--EI)->getDebugLoc(); 284 } 285 else 286 DL = MI->getDebugLoc(); 287 if (MachineInstr *NewDV = 288 TII->emitFrameIndexDebugValue(*MF, FI, Offset, MDPtr, DL)) { 289 MachineBasicBlock *MBB = DBG->getParent(); 290 MBB->insert(MI, NewDV); 291 DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV); 292 } 293 } 294 // Now this register is spilled there is should not be any DBG_VALUE pointing 295 // to this register because they are all pointing to spilled value now. 
296 LRIDbgValues.clear(); 297 if (SpillKill) 298 LR.LastUse = 0; // Don't kill register again 299 } 300 killVirtReg(LRI); 301 } 302 303 /// spillAll - Spill all dirty virtregs without killing them. 304 void RAFast::spillAll(MachineInstr *MI) { 305 if (LiveVirtRegs.empty()) return; 306 isBulkSpilling = true; 307 // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order 308 // of spilling here is deterministic, if arbitrary. 309 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end(); 310 i != e; ++i) 311 spillVirtReg(MI, i); 312 LiveVirtRegs.clear(); 313 isBulkSpilling = false; 314 } 315 316 /// usePhysReg - Handle the direct use of a physical register. 317 /// Check that the register is not used by a virtreg. 318 /// Kill the physreg, marking it free. 319 /// This may add implicit kills to MO->getParent() and invalidate MO. 320 void RAFast::usePhysReg(MachineOperand &MO) { 321 unsigned PhysReg = MO.getReg(); 322 assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) && 323 "Bad usePhysReg operand"); 324 325 switch (PhysRegState[PhysReg]) { 326 case regDisabled: 327 break; 328 case regReserved: 329 PhysRegState[PhysReg] = regFree; 330 // Fall through 331 case regFree: 332 UsedInInstr.set(PhysReg); 333 MO.setIsKill(); 334 return; 335 default: 336 // The physreg was allocated to a virtual register. That means the value we 337 // wanted has been clobbered. 338 llvm_unreachable("Instruction uses an allocated register"); 339 } 340 341 // Maybe a superregister is reserved? 342 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 343 unsigned Alias = *AS; ++AS) { 344 switch (PhysRegState[Alias]) { 345 case regDisabled: 346 break; 347 case regReserved: 348 assert(TRI->isSuperRegister(PhysReg, Alias) && 349 "Instruction is not using a subregister of a reserved register"); 350 // Leave the superregister in the working set. 
351 PhysRegState[Alias] = regFree; 352 UsedInInstr.set(Alias); 353 MO.getParent()->addRegisterKilled(Alias, TRI, true); 354 return; 355 case regFree: 356 if (TRI->isSuperRegister(PhysReg, Alias)) { 357 // Leave the superregister in the working set. 358 UsedInInstr.set(Alias); 359 MO.getParent()->addRegisterKilled(Alias, TRI, true); 360 return; 361 } 362 // Some other alias was in the working set - clear it. 363 PhysRegState[Alias] = regDisabled; 364 break; 365 default: 366 llvm_unreachable("Instruction uses an alias of an allocated register"); 367 } 368 } 369 370 // All aliases are disabled, bring register into working set. 371 PhysRegState[PhysReg] = regFree; 372 UsedInInstr.set(PhysReg); 373 MO.setIsKill(); 374 } 375 376 /// definePhysReg - Mark PhysReg as reserved or free after spilling any 377 /// virtregs. This is very similar to defineVirtReg except the physreg is 378 /// reserved instead of allocated. 379 void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg, 380 RegState NewState) { 381 UsedInInstr.set(PhysReg); 382 switch (unsigned VirtReg = PhysRegState[PhysReg]) { 383 case regDisabled: 384 break; 385 default: 386 spillVirtReg(MI, VirtReg); 387 // Fall through. 388 case regFree: 389 case regReserved: 390 PhysRegState[PhysReg] = NewState; 391 return; 392 } 393 394 // This is a disabled register, disable all aliases. 395 PhysRegState[PhysReg] = NewState; 396 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 397 unsigned Alias = *AS; ++AS) { 398 switch (unsigned VirtReg = PhysRegState[Alias]) { 399 case regDisabled: 400 break; 401 default: 402 spillVirtReg(MI, VirtReg); 403 // Fall through. 404 case regFree: 405 case regReserved: 406 PhysRegState[Alias] = regDisabled; 407 if (TRI->isSuperRegister(PhysReg, Alias)) 408 return; 409 break; 410 } 411 } 412 } 413 414 415 // calcSpillCost - Return the cost of spilling clearing out PhysReg and 416 // aliases so it is free for allocation. 
417 // Returns 0 when PhysReg is free or disabled with all aliases disabled - it 418 // can be allocated directly. 419 // Returns spillImpossible when PhysReg or an alias can't be spilled. 420 unsigned RAFast::calcSpillCost(unsigned PhysReg) const { 421 if (UsedInInstr.test(PhysReg)) { 422 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n"); 423 return spillImpossible; 424 } 425 switch (unsigned VirtReg = PhysRegState[PhysReg]) { 426 case regDisabled: 427 break; 428 case regFree: 429 return 0; 430 case regReserved: 431 DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding " 432 << PrintReg(PhysReg, TRI) << " is reserved already.\n"); 433 return spillImpossible; 434 default: 435 return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean; 436 } 437 438 // This is a disabled register, add up cost of aliases. 439 DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n"); 440 unsigned Cost = 0; 441 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 442 unsigned Alias = *AS; ++AS) { 443 if (UsedInInstr.test(Alias)) 444 return spillImpossible; 445 switch (unsigned VirtReg = PhysRegState[Alias]) { 446 case regDisabled: 447 break; 448 case regFree: 449 ++Cost; 450 break; 451 case regReserved: 452 return spillImpossible; 453 default: 454 Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean; 455 break; 456 } 457 } 458 return Cost; 459 } 460 461 462 /// assignVirtToPhysReg - This method updates local state so that we know 463 /// that PhysReg is the proper container for VirtReg now. The physical 464 /// register must not be used for anything else when this is called. 
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LRE.first, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  // Record the mapping in both directions: PhysRegState holds the virtreg
  // number, and the LiveReg entry holds the physreg.
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg, preferring Hint
/// when it is valid and cheap, otherwise the cheapest register to evict in
/// the class's allocation order.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !RegClassInfo.isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      // A nonzero cost means a clean value must be evicted first.
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      return assignVirtToPhysReg(LRE, Hint);
    }
  }

  ArrayRef<unsigned> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<unsigned>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << RC->getName() << "\n");

  // No free register; pick the cheapest one to evict.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<unsigned>::iterator I = AO.begin(), E = AO.end(); I != E; ++I) {
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  assignVirtToPhysReg(LRE, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
/// Returns an iterator into LiveVirtRegs for the (possibly new) entry.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  // Insert a default-constructed entry; New tells us whether VirtReg was
  // already live.
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LR.LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not currently live: allocate a physreg and reload from the stack slot.
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LR.PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI the refer the PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  // "Through" registers are virtregs that must be live both before and after
  // this instruction: early clobbers, tied operands, and partial redefines.
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    // PhysRegState[Reg] holds the virtreg currently assigned to Reg, if any.
    if (ThroughRegs.count(PhysRegState[Reg]))
      definePhysReg(MI, Reg, regFree);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) {
      UsedInInstr.set(*AS);
      if (ThroughRegs.count(PhysRegState[*AS]))
        definePhysReg(MI, *AS, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->second.PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->second.PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.reset();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    UsedInInstr.set(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    UsedInInstr.set(PartialDefs[i]);
}

/// addRetOperands - ensure that a return instruction has an operand for each
/// value live out of the function.
740 /// 741 /// Things marked both call and return are tail calls; do not do this for them. 742 /// The tail callee need not take the same registers as input that it produces 743 /// as output, and there are dependencies for its input registers elsewhere. 744 /// 745 /// FIXME: This should be done as part of instruction selection, and this helper 746 /// should be deleted. Until then, we use custom logic here to create the proper 747 /// operand under all circumstances. We can't use addRegisterKilled because that 748 /// doesn't make sense for undefined values. We can't simply avoid calling it 749 /// for undefined values, because we must ensure that the operand always exists. 750 void RAFast::addRetOperands(MachineBasicBlock *MBB) { 751 if (MBB->empty() || !MBB->back().isReturn() || MBB->back().isCall()) 752 return; 753 754 MachineInstr *MI = &MBB->back(); 755 756 for (MachineRegisterInfo::liveout_iterator 757 I = MBB->getParent()->getRegInfo().liveout_begin(), 758 E = MBB->getParent()->getRegInfo().liveout_end(); I != E; ++I) { 759 unsigned Reg = *I; 760 assert(TargetRegisterInfo::isPhysicalRegister(Reg) && 761 "Cannot have a live-out virtual register."); 762 763 bool hasDef = PhysRegState[Reg] == regReserved; 764 765 // Check if this register already has an operand. 766 bool Found = false; 767 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 768 MachineOperand &MO = MI->getOperand(i); 769 if (!MO.isReg() || !MO.isUse()) 770 continue; 771 772 unsigned OperReg = MO.getReg(); 773 if (!TargetRegisterInfo::isPhysicalRegister(OperReg)) 774 continue; 775 776 if (OperReg == Reg || TRI->isSuperRegister(OperReg, Reg)) { 777 // If the ret already has an operand for this physreg or a superset, 778 // don't duplicate it. Set the kill flag if the value is defined. 
779 if (hasDef && !MO.isKill()) 780 MO.setIsKill(); 781 Found = true; 782 break; 783 } 784 } 785 if (!Found) 786 MI->addOperand(MachineOperand::CreateReg(Reg, 787 false /*IsDef*/, 788 true /*IsImp*/, 789 hasDef/*IsKill*/)); 790 } 791 } 792 793 void RAFast::AllocateBasicBlock() { 794 DEBUG(dbgs() << "\nAllocating " << *MBB); 795 796 PhysRegState.assign(TRI->getNumRegs(), regDisabled); 797 assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?"); 798 799 MachineBasicBlock::iterator MII = MBB->begin(); 800 801 // Add live-in registers as live. 802 for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(), 803 E = MBB->livein_end(); I != E; ++I) 804 if (RegClassInfo.isAllocatable(*I)) 805 definePhysReg(MII, *I, regReserved); 806 807 SmallVector<unsigned, 8> VirtDead; 808 SmallVector<MachineInstr*, 32> Coalesced; 809 810 // Otherwise, sequentially allocate each instruction in the MBB. 811 while (MII != MBB->end()) { 812 MachineInstr *MI = MII++; 813 const MCInstrDesc &MCID = MI->getDesc(); 814 DEBUG({ 815 dbgs() << "\n>> " << *MI << "Regs:"; 816 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 817 if (PhysRegState[Reg] == regDisabled) continue; 818 dbgs() << " " << TRI->getName(Reg); 819 switch(PhysRegState[Reg]) { 820 case regFree: 821 break; 822 case regReserved: 823 dbgs() << "*"; 824 break; 825 default: 826 dbgs() << '=' << PrintReg(PhysRegState[Reg]); 827 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 828 dbgs() << "*"; 829 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 830 "Bad inverse map"); 831 break; 832 } 833 } 834 dbgs() << '\n'; 835 // Check that LiveVirtRegs is the inverse. 
836 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 837 e = LiveVirtRegs.end(); i != e; ++i) { 838 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 839 "Bad map key"); 840 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 841 "Bad map value"); 842 assert(PhysRegState[i->second.PhysReg] == i->first && 843 "Bad inverse map"); 844 } 845 }); 846 847 // Debug values are not allowed to change codegen in any way. 848 if (MI->isDebugValue()) { 849 bool ScanDbgValue = true; 850 while (ScanDbgValue) { 851 ScanDbgValue = false; 852 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 853 MachineOperand &MO = MI->getOperand(i); 854 if (!MO.isReg()) continue; 855 unsigned Reg = MO.getReg(); 856 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; 857 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 858 if (LRI != LiveVirtRegs.end()) 859 setPhysReg(MI, i, LRI->second.PhysReg); 860 else { 861 int SS = StackSlotForVirtReg[Reg]; 862 if (SS == -1) { 863 // We can't allocate a physreg for a DebugValue, sorry! 864 DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE"); 865 MO.setReg(0); 866 } 867 else { 868 // Modify DBG_VALUE now that the value is in a spill slot. 869 int64_t Offset = MI->getOperand(1).getImm(); 870 const MDNode *MDPtr = 871 MI->getOperand(MI->getNumOperands()-1).getMetadata(); 872 DebugLoc DL = MI->getDebugLoc(); 873 if (MachineInstr *NewDV = 874 TII->emitFrameIndexDebugValue(*MF, SS, Offset, MDPtr, DL)) { 875 DEBUG(dbgs() << "Modifying debug info due to spill:" << 876 "\t" << *MI); 877 MachineBasicBlock *MBB = MI->getParent(); 878 MBB->insert(MBB->erase(MI), NewDV); 879 // Scan NewDV operands from the beginning. 880 MI = NewDV; 881 ScanDbgValue = true; 882 break; 883 } else { 884 // We can't allocate a physreg for a DebugValue; sorry! 885 DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE"); 886 MO.setReg(0); 887 } 888 } 889 } 890 LiveDbgValueMap[Reg].push_back(MI); 891 } 892 } 893 // Next instruction. 
894 continue; 895 } 896 897 // If this is a copy, we may be able to coalesce. 898 unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0; 899 if (MI->isCopy()) { 900 CopyDst = MI->getOperand(0).getReg(); 901 CopySrc = MI->getOperand(1).getReg(); 902 CopyDstSub = MI->getOperand(0).getSubReg(); 903 CopySrcSub = MI->getOperand(1).getSubReg(); 904 } 905 906 // Track registers used by instruction. 907 UsedInInstr.reset(); 908 909 // First scan. 910 // Mark physreg uses and early clobbers as used. 911 // Find the end of the virtreg operands 912 unsigned VirtOpEnd = 0; 913 bool hasTiedOps = false; 914 bool hasEarlyClobbers = false; 915 bool hasPartialRedefs = false; 916 bool hasPhysDefs = false; 917 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 918 MachineOperand &MO = MI->getOperand(i); 919 if (!MO.isReg()) continue; 920 unsigned Reg = MO.getReg(); 921 if (!Reg) continue; 922 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 923 VirtOpEnd = i+1; 924 if (MO.isUse()) { 925 hasTiedOps = hasTiedOps || 926 MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1; 927 } else { 928 if (MO.isEarlyClobber()) 929 hasEarlyClobbers = true; 930 if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) 931 hasPartialRedefs = true; 932 } 933 continue; 934 } 935 if (!RegClassInfo.isAllocatable(Reg)) continue; 936 if (MO.isUse()) { 937 usePhysReg(MO); 938 } else if (MO.isEarlyClobber()) { 939 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 940 regFree : regReserved); 941 hasEarlyClobbers = true; 942 } else 943 hasPhysDefs = true; 944 } 945 946 // The instruction may have virtual register operands that must be allocated 947 // the same register at use-time and def-time: early clobbers and tied 948 // operands. If there are also physical defs, these registers must avoid 949 // both physical defs and uses, making them more constrained than normal 950 // operands. 
951 // Similarly, if there are multiple defs and tied operands, we must make 952 // sure the same register is allocated to uses and defs. 953 // We didn't detect inline asm tied operands above, so just make this extra 954 // pass for all inline asm. 955 if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs || 956 (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) { 957 handleThroughOperands(MI, VirtDead); 958 // Don't attempt coalescing when we have funny stuff going on. 959 CopyDst = 0; 960 // Pretend we have early clobbers so the use operands get marked below. 961 // This is not necessary for the common case of a single tied use. 962 hasEarlyClobbers = true; 963 } 964 965 // Second scan. 966 // Allocate virtreg uses. 967 for (unsigned i = 0; i != VirtOpEnd; ++i) { 968 MachineOperand &MO = MI->getOperand(i); 969 if (!MO.isReg()) continue; 970 unsigned Reg = MO.getReg(); 971 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; 972 if (MO.isUse()) { 973 LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst); 974 unsigned PhysReg = LRI->second.PhysReg; 975 CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0; 976 if (setPhysReg(MI, i, PhysReg)) 977 killVirtReg(LRI); 978 } 979 } 980 981 MRI->addPhysRegsUsed(UsedInInstr); 982 983 // Track registers defined by instruction - early clobbers and tied uses at 984 // this point. 985 UsedInInstr.reset(); 986 if (hasEarlyClobbers) { 987 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 988 MachineOperand &MO = MI->getOperand(i); 989 if (!MO.isReg()) continue; 990 unsigned Reg = MO.getReg(); 991 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 992 // Look for physreg defs and tied uses. 
993 if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue; 994 UsedInInstr.set(Reg); 995 for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) 996 UsedInInstr.set(*AS); 997 } 998 } 999 1000 unsigned DefOpEnd = MI->getNumOperands(); 1001 if (MI->isCall()) { 1002 // Spill all virtregs before a call. This serves two purposes: 1. If an 1003 // exception is thrown, the landing pad is going to expect to find 1004 // registers in their spill slots, and 2. we don't have to wade through 1005 // all the <imp-def> operands on the call instruction. 1006 DefOpEnd = VirtOpEnd; 1007 DEBUG(dbgs() << " Spilling remaining registers before call.\n"); 1008 spillAll(MI); 1009 1010 // The imp-defs are skipped below, but we still need to mark those 1011 // registers as used by the function. 1012 SkippedInstrs.insert(&MCID); 1013 } 1014 1015 // Third scan. 1016 // Allocate defs and collect dead defs. 1017 for (unsigned i = 0; i != DefOpEnd; ++i) { 1018 MachineOperand &MO = MI->getOperand(i); 1019 if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber()) 1020 continue; 1021 unsigned Reg = MO.getReg(); 1022 1023 if (TargetRegisterInfo::isPhysicalRegister(Reg)) { 1024 if (!RegClassInfo.isAllocatable(Reg)) continue; 1025 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 1026 regFree : regReserved); 1027 continue; 1028 } 1029 LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc); 1030 unsigned PhysReg = LRI->second.PhysReg; 1031 if (setPhysReg(MI, i, PhysReg)) { 1032 VirtDead.push_back(Reg); 1033 CopyDst = 0; // cancel coalescing; 1034 } else 1035 CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0; 1036 } 1037 1038 // Kill dead defs after the scan to ensure that multiple defs of the same 1039 // register are allocated identically. We didn't need to do this for uses 1040 // because we are crerating our own kill flags, and they are always at the 1041 // last use. 
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    // Record physregs written by this instruction's defs in the function's
    // used-register set.
    MRI->addPhysRegsUsed(UsedInInstr);

    // An identity copy (same physreg, same subregs) is a candidate for
    // deletion; it is only erased after the whole block is allocated.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  // addRetOperands must run after we've seen all defs in this block.
  addRetOperands(MBB);

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function.
///
/// Allocates one basic block at a time via AllocateBasicBlock, then records
/// the implicit defs of the call instructions that were skipped during
/// allocation. Always returns true: the function is modified in place.
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  // Cache the per-function objects used throughout allocation.
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.resize(TRI->getNumRegs());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Add the clobber lists for all the instructions we skipped earlier.
  for (SmallPtrSet<const MCInstrDesc*, 4>::const_iterator
       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const unsigned *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // Release per-function state before the next run of the pass.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// createFastRegisterAllocator - Factory routine registered with the
/// "fast" RegisterRegAlloc entry; returns a fresh RAFast pass instance.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}