//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "regalloc"
#include "llvm/BasicBlock.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/STLExtras.h"
#include <algorithm>
using namespace llvm;

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

// Register this allocator under the name "fast" so it can be selected with
// -regalloc=fast.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  /// RAFast - A fast, local register allocator. It allocates one basic block
  /// at a time; all virtual registers live in physregs are spilled back to
  /// their stack slots at block boundaries.
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(&ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}
  private:
    const TargetMachine *TM;
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled. -1 (the map's default) means no slot assigned yet.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      LiveReg(unsigned p=0) : LastUse(0), PhysReg(p), LastOpNum(0),
                              Dirty(false) {}
    };

    typedef DenseMap<unsigned, LiveReg> LiveRegMap;
    typedef LiveRegMap::value_type LiveRegEntry;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating
      // that the physical register is currently allocated to a virtual
      // register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
    std::vector<unsigned> PhysRegState;

    // UsedInInstr - BitVector of physregs that are used in the current
    // instruction, and so cannot be allocated.
    BitVector UsedInInstr;

    // Allocatable - vector of allocatable physical registers.
    BitVector Allocatable;

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const TargetInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

    // Relative costs returned by calcSpillCost.
    enum {
      spillClean = 1,        // Value also lives in its stack slot; no store.
      spillDirty = 100,      // Value must be stored back before reuse.
      spillImpossible = ~0u  // Register cannot be freed at this point.
    };
  public:
    virtual const char *getPassName() const {
      return "Fast Register Allocator";
    }

    virtual void getAnalysisUsage(AnalysisUsage &AU) const {
      AU.setPreservesCFG();
      AU.addRequiredID(PHIEliminationID);
      AU.addRequiredID(TwoAddressInstructionPassID);
      MachineFunctionPass::getAnalysisUsage(AU);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn);
    void AllocateBasicBlock();
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg);
    void allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineInstr *MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack. Reuses an existing slot when one was already
/// assigned to VirtReg.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the location Reg would belong...
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // Check for non-debug uses or defs following MO.
  // This is the most likely way to fail - fast path it.
  MachineOperand *Next = &MO;
  while ((Next = Next->getNextOperandForReg()))
    if (!Next->isDebug())
      return false;

  // If the register has ever been spilled or reloaded, we conservatively
  // assume it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  return &MRI->reg_nodbg_begin(MO.getReg()).getOperand() == &MO;
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  // Tied operands must not be killed; the value lives on in the def.
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      // Operand was already rewritten to the physreg - just flag it.
      MO.setIsKill();
    else
      // Operand still refers to something else (e.g. the virtreg); attach an
      // implicit kill of the physreg to the instruction instead.
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(LRI->second);
  const LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");
  PhysRegState[LR.PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk. During bulk
  // spilling the caller iterates over the map and clears it afterwards, so
  // erasing here would invalidate its iterators.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed. If isKill is set, the register is also
/// killed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = LiveVirtRegs.find(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling. The store is inserted
/// before MI, and the virtreg is killed afterwards.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = LRI->second;
  assert(PhysRegState[LR.PhysReg] == LRI->first && "Broken RegState mapping");

  // Only dirty registers need a store; a clean value already matches its
  // stack slot.
  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling %reg" << LRI->first
                 << " in " << TRI->getName(LR.PhysReg));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->first);
    int FI = getStackSpaceFor(LRI->first, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    if (SpillKill)
      LR.LastUse = 0; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineInstr *MI) {
  if (LiveVirtRegs.empty()) return;
  // Signal killVirtReg not to erase entries while we iterate; the whole map
  // is cleared below instead.
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    // An alias may hold the register; resolved by the loop below.
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    UsedInInstr.set(PhysReg);
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value
    // we wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      assert(TRI->isSuperRegister(PhysReg, Alias) &&
             "Instruction is not using a subregister of a reserved register");
      // Leave the superregister in the working set.
      PhysRegState[Alias] = regFree;
      UsedInInstr.set(Alias);
      MO.getParent()->addRegisterKilled(Alias, TRI, true);
      return;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        UsedInInstr.set(Alias);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  UsedInInstr.set(PhysReg);
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  UsedInInstr.set(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    // An alias may be live; handled by the loop below.
    break;
  default:
    // A virtreg currently owns PhysReg - spill it before taking over.
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    UsedInInstr.set(Alias);
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      // Once a superregister was live, no smaller alias can still be live, so
      // the remaining aliases need no attention.
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling clearing out PhysReg and
// aliases so it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  // Registers already used by this instruction can never be freed here.
  if (UsedInInstr.test(PhysReg))
    return spillImpossible;
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    return spillImpossible;
  default:
    // Held by a virtreg: dirty values cost a store, clean ones don't.
    return LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
  }

  // This is a disabled register, add up cost of aliases.
  unsigned Cost = 0;
  for (const unsigned *AS = TRI->getAliasSet(PhysReg);
       unsigned Alias = *AS; ++AS) {
    if (UsedInInstr.test(Alias))
      return spillImpossible;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default:
      Cost += LiveVirtRegs.lookup(VirtReg).Dirty ? spillDirty : spillClean;
      break;
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now. The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveRegEntry &LRE, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning %reg" << LRE.first << " to "
               << TRI->getName(PhysReg) << "\n");
  PhysRegState[PhysReg] = LRE.first;
  assert(!LRE.second.PhysReg && "Already assigned a physreg");
  LRE.second.PhysReg = PhysReg;
}

/// allocVirtReg - Allocate a physical register for VirtReg, preferring Hint
/// when it is usable, then any free register, then the cheapest register to
/// evict. Reports a fatal error when no register can be freed.
void RAFast::allocVirtReg(MachineInstr *MI, LiveRegEntry &LRE, unsigned Hint) {
  const unsigned VirtReg = LRE.first;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !Allocatable.test(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    switch(calcSpillCost(Hint)) {
    default:
      // Finite cost: evict the current occupant, then take the hint.
      definePhysReg(MI, Hint, regFree);
      // Fall through.
    case 0:
      return assignVirtToPhysReg(LRE, Hint);
    case spillImpossible:
      break;
    }
  }

  TargetRegisterClass::iterator AOB = RC->allocation_order_begin(*MF);
  TargetRegisterClass::iterator AOE = RC->allocation_order_end(*MF);

  // First try to find a completely free register.
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !UsedInInstr.test(PhysReg))
      return assignVirtToPhysReg(LRE, PhysReg);
  }

  DEBUG(dbgs() << "Allocating %reg" << VirtReg << " from " << RC->getName()
               << "\n");

  // No free register - pick the cheapest one to clear out.
  unsigned BestReg = 0, BestCost = spillImpossible;
  for (TargetRegisterClass::iterator I = AOB; I != AOE; ++I) {
    unsigned Cost = calcSpillCost(*I);
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0)
      return assignVirtToPhysReg(LRE, *I);
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    return assignVirtToPhysReg(LRE, BestReg);
  }

  // Nothing we can do.
  std::string msg;
  raw_string_ostream Msg(msg);
  Msg << "Ran out of registers during register allocation!";
  if (MI->isInlineAsm()) {
    Msg << "\nPlease check your inline asm statement for "
        << "invalid constraints:\n";
    MI->print(Msg, TM);
  }
  report_fatal_error(Msg.str());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  // A subreg index on the def means only part of the register is rewritten.
  bool PartialRedef = MI->getOperand(OpNum).getSubReg();
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      unsigned SrcReg, DstReg, SrcSubReg, DstSubReg;
      // It's a copy, use the destination register as a hint.
      if (TII->isMoveInstr(*MRI->use_nodbg_begin(VirtReg),
                           SrcReg, DstReg, SrcSubReg, DstSubReg))
        Hint = DstReg;
    }
    allocVirtReg(MI, *LRI, Hint);
    // If this is only a partial redefinition, we must reload the other parts.
    if (PartialRedef && MI->readsVirtualRegister(VirtReg)) {
      const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
      int FI = getStackSpaceFor(VirtReg, RC);
      DEBUG(dbgs() << "Reloading for partial redef: %reg" << VirtReg << "\n");
      TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FI, RC, TRI);
      ++NumLoads;
    }
  } else if (LR.LastUse && !PartialRedef) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())
      addKillFlag(LR);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this def as the latest access and mark the value as needing a
  // store before its physreg can be reused.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  LR.Dirty = true;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  tie(LRI, New) = LiveVirtRegs.insert(std::make_pair(VirtReg, LiveReg()));
  LiveReg &LR = LRI->second;
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    // Not live in a physreg yet - allocate one and reload from the slot.
    allocVirtReg(MI, *LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading %reg" << VirtReg << " into "
                 << TRI->getName(LR.PhysReg) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LR.PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LR.Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      MO.setIsKill();
    } else if (MO.isKill()) {
      // A dirty value with remaining uses cannot really be killed here.
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  }
  assert(LR.PhysReg && "Register not assigned");
  // Record this use as the latest access to the value.
  LR.LastUse = MI;
  LR.LastOpNum = OpNum;
  UsedInInstr.set(LR.PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
// subregs. This may invalidate any operand pointers.
// Return true if the operand kills its register.
595 bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) { 596 MachineOperand &MO = MI->getOperand(OpNum); 597 if (!MO.getSubReg()) { 598 MO.setReg(PhysReg); 599 return MO.isKill() || MO.isDead(); 600 } 601 602 // Handle subregister index. 603 MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0); 604 MO.setSubReg(0); 605 606 // A kill flag implies killing the full register. Add corresponding super 607 // register kill. 608 if (MO.isKill()) { 609 MI->addRegisterKilled(PhysReg, TRI, true); 610 return true; 611 } 612 return MO.isDead(); 613 } 614 615 void RAFast::AllocateBasicBlock() { 616 DEBUG(dbgs() << "\nAllocating " << *MBB); 617 618 PhysRegState.assign(TRI->getNumRegs(), regDisabled); 619 assert(LiveVirtRegs.empty() && "Mapping not cleared form last block?"); 620 621 MachineBasicBlock::iterator MII = MBB->begin(); 622 623 // Add live-in registers as live. 624 for (MachineBasicBlock::livein_iterator I = MBB->livein_begin(), 625 E = MBB->livein_end(); I != E; ++I) 626 definePhysReg(MII, *I, regReserved); 627 628 SmallVector<unsigned, 8> PhysECs, VirtDead; 629 SmallVector<MachineInstr*, 32> Coalesced; 630 631 // Otherwise, sequentially allocate each instruction in the MBB. 632 while (MII != MBB->end()) { 633 MachineInstr *MI = MII++; 634 const TargetInstrDesc &TID = MI->getDesc(); 635 DEBUG({ 636 dbgs() << "\n>> " << *MI << "Regs:"; 637 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 638 if (PhysRegState[Reg] == regDisabled) continue; 639 dbgs() << " " << TRI->getName(Reg); 640 switch(PhysRegState[Reg]) { 641 case regFree: 642 break; 643 case regReserved: 644 dbgs() << "*"; 645 break; 646 default: 647 dbgs() << "=%reg" << PhysRegState[Reg]; 648 if (LiveVirtRegs[PhysRegState[Reg]].Dirty) 649 dbgs() << "*"; 650 assert(LiveVirtRegs[PhysRegState[Reg]].PhysReg == Reg && 651 "Bad inverse map"); 652 break; 653 } 654 } 655 dbgs() << '\n'; 656 // Check that LiveVirtRegs is the inverse. 
657 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 658 e = LiveVirtRegs.end(); i != e; ++i) { 659 assert(TargetRegisterInfo::isVirtualRegister(i->first) && 660 "Bad map key"); 661 assert(TargetRegisterInfo::isPhysicalRegister(i->second.PhysReg) && 662 "Bad map value"); 663 assert(PhysRegState[i->second.PhysReg] == i->first && 664 "Bad inverse map"); 665 } 666 }); 667 668 // Debug values are not allowed to change codegen in any way. 669 if (MI->isDebugValue()) { 670 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 671 MachineOperand &MO = MI->getOperand(i); 672 if (!MO.isReg()) continue; 673 unsigned Reg = MO.getReg(); 674 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 675 LiveRegMap::iterator LRI = LiveVirtRegs.find(Reg); 676 if (LRI != LiveVirtRegs.end()) 677 setPhysReg(MI, i, LRI->second.PhysReg); 678 else 679 MO.setReg(0); // We can't allocate a physreg for a DebugValue, sorry! 680 } 681 // Next instruction. 682 continue; 683 } 684 685 // If this is a copy, we may be able to coalesce. 686 unsigned CopySrc, CopyDst, CopySrcSub, CopyDstSub; 687 if (!TII->isMoveInstr(*MI, CopySrc, CopyDst, CopySrcSub, CopyDstSub)) 688 CopySrc = CopyDst = 0; 689 690 // Track registers used by instruction. 691 UsedInInstr.reset(); 692 PhysECs.clear(); 693 694 // First scan. 695 // Mark physreg uses and early clobbers as used. 696 // Find the end of the virtreg operands 697 unsigned VirtOpEnd = 0; 698 for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) { 699 MachineOperand &MO = MI->getOperand(i); 700 if (!MO.isReg()) continue; 701 unsigned Reg = MO.getReg(); 702 if (!Reg) continue; 703 if (TargetRegisterInfo::isVirtualRegister(Reg)) { 704 VirtOpEnd = i+1; 705 continue; 706 } 707 if (!Allocatable.test(Reg)) continue; 708 if (MO.isUse()) { 709 usePhysReg(MO); 710 } else if (MO.isEarlyClobber()) { 711 definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved); 712 PhysECs.push_back(Reg); 713 } 714 } 715 716 // Second scan. 
717 // Allocate virtreg uses and early clobbers. 718 // Collect VirtKills 719 for (unsigned i = 0; i != VirtOpEnd; ++i) { 720 MachineOperand &MO = MI->getOperand(i); 721 if (!MO.isReg()) continue; 722 unsigned Reg = MO.getReg(); 723 if (!Reg || TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 724 if (MO.isUse()) { 725 LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst); 726 unsigned PhysReg = LRI->second.PhysReg; 727 CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0; 728 if (setPhysReg(MI, i, PhysReg)) 729 killVirtReg(LRI); 730 } else if (MO.isEarlyClobber()) { 731 // Note: defineVirtReg may invalidate MO. 732 LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0); 733 unsigned PhysReg = LRI->second.PhysReg; 734 setPhysReg(MI, i, PhysReg); 735 PhysECs.push_back(PhysReg); 736 } 737 } 738 739 MRI->addPhysRegsUsed(UsedInInstr); 740 741 // Track registers defined by instruction - early clobbers at this point. 742 UsedInInstr.reset(); 743 for (unsigned i = 0, e = PhysECs.size(); i != e; ++i) { 744 unsigned PhysReg = PhysECs[i]; 745 UsedInInstr.set(PhysReg); 746 for (const unsigned *AS = TRI->getAliasSet(PhysReg); 747 unsigned Alias = *AS; ++AS) 748 UsedInInstr.set(Alias); 749 } 750 751 unsigned DefOpEnd = MI->getNumOperands(); 752 if (TID.isCall()) { 753 // Spill all virtregs before a call. This serves two purposes: 1. If an 754 // exception is thrown, the landing pad is going to expect to find registers 755 // in their spill slots, and 2. we don't have to wade through all the 756 // <imp-def> operands on the call instruction. 757 DefOpEnd = VirtOpEnd; 758 DEBUG(dbgs() << " Spilling remaining registers before call.\n"); 759 spillAll(MI); 760 761 // The imp-defs are skipped below, but we still need to mark those 762 // registers as used by the function. 763 SkippedInstrs.insert(&TID); 764 } 765 766 // Third scan. 767 // Allocate defs and collect dead defs. 
768 for (unsigned i = 0; i != DefOpEnd; ++i) { 769 MachineOperand &MO = MI->getOperand(i); 770 if (!MO.isReg() || !MO.isDef() || !MO.getReg()) continue; 771 unsigned Reg = MO.getReg(); 772 773 if (TargetRegisterInfo::isPhysicalRegister(Reg)) { 774 if (!Allocatable.test(Reg)) continue; 775 definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ? 776 regFree : regReserved); 777 continue; 778 } 779 LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc); 780 unsigned PhysReg = LRI->second.PhysReg; 781 if (setPhysReg(MI, i, PhysReg)) { 782 VirtDead.push_back(Reg); 783 CopyDst = 0; // cancel coalescing; 784 } else 785 CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0; 786 } 787 788 // Kill dead defs after the scan to ensure that multiple defs of the same 789 // register are allocated identically. We didn't need to do this for uses 790 // because we are crerating our own kill flags, and they are always at the 791 // last use. 792 for (unsigned i = 0, e = VirtDead.size(); i != e; ++i) 793 killVirtReg(VirtDead[i]); 794 VirtDead.clear(); 795 796 MRI->addPhysRegsUsed(UsedInInstr); 797 798 if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) { 799 DEBUG(dbgs() << "-- coalescing: " << *MI); 800 Coalesced.push_back(MI); 801 } else { 802 DEBUG(dbgs() << "<< " << *MI); 803 } 804 } 805 806 // Spill all physical registers holding virtual registers now. 807 DEBUG(dbgs() << "Spilling live registers at end of block.\n"); 808 spillAll(MBB->getFirstTerminator()); 809 810 // Erase all the coalesced copies. We are delaying it until now because 811 // LiveVirtRegs might refer to the instrs. 
812 for (unsigned i = 0, e = Coalesced.size(); i != e; ++i) 813 MBB->erase(Coalesced[i]); 814 NumCopies += Coalesced.size(); 815 816 DEBUG(MBB->dump()); 817 } 818 819 /// runOnMachineFunction - Register allocate the whole function 820 /// 821 bool RAFast::runOnMachineFunction(MachineFunction &Fn) { 822 DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n" 823 << "********** Function: " 824 << ((Value*)Fn.getFunction())->getName() << '\n'); 825 MF = &Fn; 826 MRI = &MF->getRegInfo(); 827 TM = &Fn.getTarget(); 828 TRI = TM->getRegisterInfo(); 829 TII = TM->getInstrInfo(); 830 831 UsedInInstr.resize(TRI->getNumRegs()); 832 Allocatable = TRI->getAllocatableSet(*MF); 833 834 // initialize the virtual->physical register map to have a 'null' 835 // mapping for all virtual registers 836 unsigned LastVirtReg = MRI->getLastVirtReg(); 837 StackSlotForVirtReg.grow(LastVirtReg); 838 839 // Loop over all of the basic blocks, eliminating virtual register references 840 for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end(); 841 MBBi != MBBe; ++MBBi) { 842 MBB = &*MBBi; 843 AllocateBasicBlock(); 844 } 845 846 // Make sure the set of used physregs is closed under subreg operations. 847 MRI->closePhysRegsUsed(*TRI); 848 849 // Add the clobber lists for all the instructions we skipped earlier. 850 for (SmallPtrSet<const TargetInstrDesc*, 4>::const_iterator 851 I = SkippedInstrs.begin(), E = SkippedInstrs.end(); I != E; ++I) 852 if (const unsigned *Defs = (*I)->getImplicitDefs()) 853 while (*Defs) 854 MRI->setPhysRegUsed(*Defs++); 855 856 SkippedInstrs.clear(); 857 StackSlotForVirtReg.clear(); 858 return true; 859 } 860 861 FunctionPass *llvm::createFastRegisterAllocator() { 862 return new RAFast(); 863 } 864