//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the -regalloc=fast command line option.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  // RAFast - A fast, block-local register allocator. It walks each basic
  // block once, assigning physical registers on the fly and spilling
  // everything that is still live at the end of the block.
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    // Cached target/function state, set up by runOnMachineFunction.
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled. -1 means no slot has been allocated yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // LiveDbgValueMap - Maps a virtual register to the last DBG_VALUE
    // instruction referencing it, so debug info can be updated on spill.
    DenseMap<unsigned, MachineInstr *> LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const TargetInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Spill cost constants used by calcSpillCost/allocVirtReg: a dirty
    // register costs a store, a clean one is nearly free to evict.
    enum {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      // PHIs and two-address constraints must be lowered before allocation.
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
210 void RAFast::addKillFlag(const LiveReg &LR) { 211 if (!LR.LastUse) return; 212 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 213 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { 214 if (MO.getReg() == LR.PhysReg) 215 MO.setIsKill(); 216 else 217 LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true); 218 } 219 } 220 221 /// killVirtReg - Mark virtreg as no longer available. 222 void RAFast::killVirtReg(LiveRegMap::iterator LRI) { 223 addKillFlag(LRI->second); 224 const LiveReg &LR = LRI->second; 225 assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping"); 226 PhysRegState[LR.PhysReg] = regFree; 227 // Erase from LiveVirtRegs unless we're spilling in bulk. 228 if (!isBulkSpilling) 229 LiveVirtRegs.erase(LRI); 230 } 231 232 /// killVirtReg - Mark virtreg as no longer available. 233 void RAFast::killVirtReg(unsigned VirtReg) { 234 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 235 "killVirtReg needs a virtual register"); 236 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 237 if (LRI != LiveVirtRegs.end()) 238 killVirtReg(LRI); 239 } 240 241 /// spillVirtReg - This method spills the value specified by VirtReg into the 242 /// corresponding stack slot if needed. 243 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) { 244 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 245 "Spilling a physical register is illegal!"); 246 LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg); 247 assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register"); 248 spillVirtReg(MI, LRI); 249 } 250 251 /// spillVirtReg - Do the actual work of spilling. 
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  // Only dirty registers need an actual store; clean ones are simply killed.
  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    // The store is inserted immediately before MI.
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE then insert new DBG_VALUE to
    // identify spilled location as the place to find corresponding variable's
    // value.
    if (MachineInstr *DBG = LiveDbgValueMap.lookup(LRI->first)) {
      // The metadata describing the variable is the last DBG_VALUE operand.
      const MDNode *MDPtr =
        DBG->getOperand(DBG->getNumOperands()-1).getMetadata();
      int64_t Offset = 0;
      if (DBG->getOperand(1).isImm())
        Offset = DBG->getOperand(1).getImm();
      DebugLoc DL;
      if (MI == MBB->end()) {
        // If MI is at basic block end then use last instruction's location.
        MachineBasicBlock::iterator EI = MI;
        DL = (--EI)->getDebugLoc();
      }
      else
        DL = MI->getDebugLoc();
      // The target may decline to produce a frame-index DBG_VALUE.
      if (MachineInstr *NewDV =
          TII->emitFrameIndexDebugValue(*MF, FI, Offset, MDPtr, DL)) {
        // NOTE: this local MBB (the block containing the old DBG_VALUE)
        // intentionally shadows the member MBB.
        MachineBasicBlock *MBB = DBG->getParent();
        MBB->insert(MI, NewDV);
        DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
        LiveDbgValueMap[LRI->first] = NewDV;
      }
    }
    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  // Release the physreg and (unless bulk spilling) drop the map entry.
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  // Keep iterators valid while we walk the map; see killVirtReg.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    // A use consumes the reservation.
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    // Evict the virtual register currently occupying PhysReg.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once the covering superregister is handled, the rest of the alias
      // set cannot conflict any more.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  // A register already claimed by the current instruction can never be freed.
  if (UsedInInstr.test(PhysReg))
    return spillImpossible;
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    // Occupied by a virtreg: dirty values need a store, clean ones do not.
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      // Slight penalty for breaking up a free alias.
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to "
               << TRI->getName(PhysReg) << "\n");
  // Record both directions of the virtreg <-> physreg mapping.
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !Allocatable.test(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    switch(calcSpillCost(Hint)) {
    default:
      // Spillable: evict the current occupant, then take the hint.
      definePhysReg(MI, Hint, regFree);
      // Fall through.
    case 0:
      return assignVirtToPhysReg(LRE, Hint);
    case spillImpossible:
      break;
    }
  }

  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // First try to find a completely free register.
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg) &&
        Allocatable.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << "\n");

  // No free register: pick the cheapest register to evict.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    if (!Allocatable.test(*I))
      continue;
    unsigned Cost = calcSpillCost(*I);
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LR.LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  // This def makes the in-register value newer than any stack copy.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not currently in a register: allocate one and reload from the slot.
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into "
                 << TRI->getName(LR.PhysReg) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      // Block-local value with no later references - it can die here
      // without ever being stored.
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to the PhysReg,
// considering subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || MO.isDead();
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }
  return MO.isDead();
}

// Handle special instruction operand like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  // A "through" register is live both before and after the instruction:
  // early clobbers, tied operands, and partial redefines.
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg))
        DEBUG(dbgs() << " %reg" << Reg);
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    // PhysRegState[Reg] holds a virtreg number when Reg is allocated to one.
    if (ThroughRegs.count(PhysRegState[Reg]))
      definePhysReg(MI, Reg, regFree);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS) {
      UsedInInstr.set(*AS);
      if (ThroughRegs.count(PhysRegState[*AS]))
        definePhysReg(MI, *AS, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses and early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
                   << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->second.PhysReg);
    } else if (MO.isEarlyClobber()) {
      // Note: defineVirtReg may invalidate MO.
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg))
        VirtDead.push_back(Reg);
    }
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.reset();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    UsedInInstr.set(Reg);
    for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
      UsedInInstr.set(*AS);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    UsedInInstr.set(PartialDefs[i]);
}

void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  // Reset per-block physreg state; live-ins are re-established below.
  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(),
         E = MBB->livein_end(); I != E; ++I)
    if (Allocatable.test(*I))
      definePhysReg(MII, *I, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Otherwise, sequentially allocate each instruction in the MBB.
749 while (MII != MBB->end()) { 750 MachineInstr *MI = MII++; 751 const TargetInstrDesc &TID = MI->getDesc(); 752 DEBUG({ 753 dbgs() << "\n>> " << *MI << "Regs:"; 754 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 755 if (PhysRegState[Reg] == regDisabled) continue; 756 dbgs() << " " << TRI->getName(Reg); 757 switch(PhysRegState[Reg]) { 758 case regFree: 759 break; 760 case regReserved: 761 dbgs() << "*"; 762 break; 763 default: 764 dbgs() << "=%reg" << PhysRegState[Reg]; 765 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 766 dbgs() << "*"; 767 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 768 "Bad inverse map"); 769 break; 770 } 771 } 772 dbgs() << '\n'; 773 // Check that LiveVirtRegs is the inverse. 774 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 775 e = LiveVirtRegs.end(); i != e; ++i) { 776 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 777 "Bad map key"); 778 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 779 "Bad map value"); 780 assert(PhysRegState[i->second.PhysReg] == i->first && 781 "Bad inverse map"); 782 } 783 }); 784 785 // Debug values are not allowed to change codegen in any way. 786 if (MI->isDebugValue()) { 787 bool ScanDbgValue = true; 788 while (ScanDbgValue) { 789 ScanDbgValue = false; 790 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 791 MachineOperand &MO = MI->getOperand(i); 792 if (!MO.isReg()) continue; 793 unsigned Reg = MO.getReg(); 794 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 795 LiveDbgValueMap[Reg] = MI; 796 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 797 if (LRI != LiveVirtRegs.end()) 798 setPhysReg(MI, i, LRI->second.PhysReg); 799 else { 800 int SS = StackSlotForVirtReg[Reg]; 801 if (SS == -1) 802 // We can't allocate a physreg for a DebugValue, sorry! 803 MO.setReg(0); 804 else { 805 // Modify DBG_VALUE now that the value is in a spill slot. 
              int64_t Offset = MI->getOperand(1).getImm();
              const MDNode *MDPtr =
                MI->getOperand(MI->getNumOperands()-1).getMetadata();
              DebugLoc DL = MI->getDebugLoc();
              if (MachineInstr *NewDV =
                  TII->emitFrameIndexDebugValue(*MF, SS, Offset, MDPtr, DL)) {
                DEBUG(dbgs() << "Modifying debug info due to spill:" <<
                      "\t" << *MI);
                MachineBasicBlock *MBB = MI->getParent();
                // Replace the old DBG_VALUE with the frame-index form at the
                // same position in the block.
                MBB->insert(MBB->erase(MI), NewDV);
                // Scan NewDV operands from the beginning.
                MI = NewDV;
                ScanDbgValue = true;
                break;
              } else
                // We can't allocate a physreg for a DebugValue; sorry!
                MO.setReg(0);
            }
          }
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.reset();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands (VirtOpEnd is one past the last
    // operand that mentions a virtual register).
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                  TID.getOperandConstraint(i, TOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          // A subreg def that also reads the vreg only redefines part of it.
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!Allocatable.test(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || TID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->second.PhysReg;
        // Remember the physreg if this use could be the source of a coalesced
        // copy; otherwise cancel source tracking.
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        // setPhysReg returns true when it placed a kill flag on this operand.
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    MRI->addPhysRegsUsed(UsedInInstr);

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.reset();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        // Mark the register and all of its aliases as unavailable for defs.
        UsedInInstr.set(Reg);
        for (const unsigned *AS = TRI->getAliasSet(Reg); *AS; ++AS)
          UsedInInstr.set(*AS);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (TID.isCall()) {
      // Spill all virtregs before a call. This serves two purposes: 1. If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots, and 2. we don't have to wade through
      // all the <imp-def> operands on the call instruction.
      DefOpEnd = VirtOpEnd;
      DEBUG(dbgs() << " Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&TID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Early clobbers were already handled by definePhysReg in the first scan.
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!Allocatable.test(Reg)) continue;
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->second.PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        // Dead def: queue the kill until after all defs are processed.
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    MRI->addPhysRegsUsed(UsedInInstr);

    // A copy whose source and destination ended up in the same register (with
    // matching subregister indices) is a no-op and can be deleted later.
    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: "
               << ((Value*)Fn.getFunction())->getName() << '\n');
  // Cache the function, register info, and target hooks for use by the
  // per-block allocation code.
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TM = &Fn.getTarget();
  TRI = TM->getRegisterInfo();
  TII = TM->getInstrInfo();

  // Size the per-instruction register bit vector to the target's register
  // count, and compute the set of registers we may allocate.
  UsedInInstr.resize(TRI->getNumRegs());
  Allocatable = TRI->getAllocatableSet(*MF);

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned LastVirtReg = MRI->getLastVirtReg();
  StackSlotForVirtReg.grow(LastVirtReg);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // Make sure the set of used physregs is closed under subreg operations.
  MRI->closePhysRegsUsed(*TRI);

  // Add the clobber lists for all the instructions we skipped earlier.
  // These are the call instructions whose <imp-def> operands were not
  // individually processed during allocation.
  for (SmallPtrSet<const TargetInstrDesc*, 4>::const_iterator
       I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I)
    if (const unsigned *Defs = (*I)->getImplicitDefs())
      while (*Defs)
        MRI->setPhysRegUsed(*Defs++);

  // Release per-function state; the pass object may be reused for the next
  // function.
  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// createFastRegisterAllocator - Factory for the "fast" register allocator
/// pass registered with RegisterRegAlloc above.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}