1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 /// \file This register allocator allocates registers to a basic block at a 10 /// time, attempting to keep values in registers and reusing registers as 11 /// appropriate. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #include "llvm/ADT/ArrayRef.h" 16 #include "llvm/ADT/DenseMap.h" 17 #include "llvm/ADT/IndexedMap.h" 18 #include "llvm/ADT/SmallSet.h" 19 #include "llvm/ADT/SmallVector.h" 20 #include "llvm/ADT/SparseSet.h" 21 #include "llvm/ADT/Statistic.h" 22 #include "llvm/CodeGen/MachineBasicBlock.h" 23 #include "llvm/CodeGen/MachineFrameInfo.h" 24 #include "llvm/CodeGen/MachineFunction.h" 25 #include "llvm/CodeGen/MachineFunctionPass.h" 26 #include "llvm/CodeGen/MachineInstr.h" 27 #include "llvm/CodeGen/MachineInstrBuilder.h" 28 #include "llvm/CodeGen/MachineOperand.h" 29 #include "llvm/CodeGen/MachineRegisterInfo.h" 30 #include "llvm/CodeGen/RegAllocRegistry.h" 31 #include "llvm/CodeGen/RegisterClassInfo.h" 32 #include "llvm/CodeGen/TargetInstrInfo.h" 33 #include "llvm/CodeGen/TargetOpcodes.h" 34 #include "llvm/CodeGen/TargetRegisterInfo.h" 35 #include "llvm/CodeGen/TargetSubtargetInfo.h" 36 #include "llvm/IR/DebugLoc.h" 37 #include "llvm/IR/Metadata.h" 38 #include "llvm/MC/MCInstrDesc.h" 39 #include "llvm/MC/MCRegisterInfo.h" 40 #include "llvm/Pass.h" 41 #include "llvm/Support/Casting.h" 42 #include "llvm/Support/Compiler.h" 43 #include "llvm/Support/Debug.h" 44 #include "llvm/Support/ErrorHandling.h" 45 #include "llvm/Support/raw_ostream.h" 46 #include <cassert> 47 #include <tuple> 48 #include <vector> 49 50 using namespace 
llvm; 51 52 #define DEBUG_TYPE "regalloc" 53 54 STATISTIC(NumStores, "Number of stores added"); 55 STATISTIC(NumLoads , "Number of loads added"); 56 STATISTIC(NumCoalesced, "Number of copies coalesced"); 57 58 static RegisterRegAlloc 59 fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator); 60 61 namespace { 62 63 class RegAllocFast : public MachineFunctionPass { 64 public: 65 static char ID; 66 67 RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {} 68 69 private: 70 MachineFrameInfo *MFI; 71 MachineRegisterInfo *MRI; 72 const TargetRegisterInfo *TRI; 73 const TargetInstrInfo *TII; 74 RegisterClassInfo RegClassInfo; 75 76 /// Basic block currently being allocated. 77 MachineBasicBlock *MBB; 78 79 /// Maps virtual regs to the frame index where these values are spilled. 80 IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg; 81 82 /// Everything we know about a live virtual register. 83 struct LiveReg { 84 MachineInstr *LastUse = nullptr; ///< Last instr to use reg. 85 unsigned VirtReg; ///< Virtual register number. 86 MCPhysReg PhysReg = 0; ///< Currently held here. 87 unsigned short LastOpNum = 0; ///< OpNum on LastUse. 88 bool Dirty = false; ///< Register needs spill. 89 90 explicit LiveReg(unsigned VirtReg) : VirtReg(VirtReg) {} 91 92 unsigned getSparseSetIndex() const { 93 return TargetRegisterInfo::virtReg2Index(VirtReg); 94 } 95 }; 96 97 using LiveRegMap = SparseSet<LiveReg>; 98 /// This map contains entries for each virtual register that is currently 99 /// available in a physical register. 100 LiveRegMap LiveVirtRegs; 101 102 DenseMap<unsigned, SmallVector<MachineInstr *, 2>> LiveDbgValueMap; 103 104 /// State of a physical register. 105 enum RegState { 106 /// A disabled register is not available for allocation, but an alias may 107 /// be in use. A register can only be moved out of the disabled state if 108 /// all aliases are disabled. 
109 regDisabled, 110 111 /// A free register is not currently in use and can be allocated 112 /// immediately without checking aliases. 113 regFree, 114 115 /// A reserved register has been assigned explicitly (e.g., setting up a 116 /// call parameter), and it remains reserved until it is used. 117 regReserved 118 119 /// A register state may also be a virtual register number, indication 120 /// that the physical register is currently allocated to a virtual 121 /// register. In that case, LiveVirtRegs contains the inverse mapping. 122 }; 123 124 /// Maps each physical register to a RegState enum or a virtual register. 125 std::vector<unsigned> PhysRegState; 126 127 SmallVector<unsigned, 16> VirtDead; 128 SmallVector<MachineInstr *, 32> Coalesced; 129 130 using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>; 131 /// Set of register units that are used in the current instruction, and so 132 /// cannot be allocated. 133 RegUnitSet UsedInInstr; 134 135 void setPhysRegState(MCPhysReg PhysReg, unsigned NewState); 136 137 /// Mark a physreg as used in this instruction. 138 void markRegUsedInInstr(MCPhysReg PhysReg) { 139 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) 140 UsedInInstr.insert(*Units); 141 } 142 143 /// Check if a physreg or any of its aliases are used in this instruction. 
144 bool isRegUsedInInstr(MCPhysReg PhysReg) const { 145 for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units) 146 if (UsedInInstr.count(*Units)) 147 return true; 148 return false; 149 } 150 151 enum : unsigned { 152 spillClean = 50, 153 spillDirty = 100, 154 spillImpossible = ~0u 155 }; 156 157 public: 158 StringRef getPassName() const override { return "Fast Register Allocator"; } 159 160 void getAnalysisUsage(AnalysisUsage &AU) const override { 161 AU.setPreservesCFG(); 162 MachineFunctionPass::getAnalysisUsage(AU); 163 } 164 165 MachineFunctionProperties getRequiredProperties() const override { 166 return MachineFunctionProperties().set( 167 MachineFunctionProperties::Property::NoPHIs); 168 } 169 170 MachineFunctionProperties getSetProperties() const override { 171 return MachineFunctionProperties().set( 172 MachineFunctionProperties::Property::NoVRegs); 173 } 174 175 private: 176 bool runOnMachineFunction(MachineFunction &MF) override; 177 178 void allocateBasicBlock(MachineBasicBlock &MBB); 179 void allocateInstruction(MachineInstr &MI); 180 void handleDebugValue(MachineInstr &MI); 181 void handleThroughOperands(MachineInstr &MI, 182 SmallVectorImpl<unsigned> &VirtDead); 183 bool isLastUseOfLocalReg(const MachineOperand &MO) const; 184 185 void addKillFlag(const LiveReg &LRI); 186 void killVirtReg(LiveReg &LR); 187 void killVirtReg(unsigned VirtReg); 188 void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR); 189 void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg); 190 191 void usePhysReg(MachineOperand &MO); 192 void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg, 193 RegState NewState); 194 unsigned calcSpillCost(MCPhysReg PhysReg) const; 195 void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg); 196 197 LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) { 198 return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); 199 } 200 201 LiveRegMap::const_iterator findLiveVirtReg(unsigned 
VirtReg) const { 202 return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg)); 203 } 204 205 void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint); 206 void allocVirtRegUndef(MachineOperand &MO); 207 MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, 208 unsigned Hint); 209 LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg, 210 unsigned Hint); 211 void spillAll(MachineBasicBlock::iterator MI); 212 bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg); 213 214 int getStackSpaceFor(unsigned VirtReg); 215 void spill(MachineBasicBlock::iterator Before, unsigned VirtReg, 216 MCPhysReg AssignedReg, bool Kill); 217 void reload(MachineBasicBlock::iterator Before, unsigned VirtReg, 218 MCPhysReg PhysReg); 219 220 void dumpState(); 221 }; 222 223 } // end anonymous namespace 224 225 char RegAllocFast::ID = 0; 226 227 INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false, 228 false) 229 230 void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) { 231 PhysRegState[PhysReg] = NewState; 232 } 233 234 /// This allocates space for the specified virtual register to be held on the 235 /// stack. 236 int RegAllocFast::getStackSpaceFor(unsigned VirtReg) { 237 // Find the location Reg would belong... 238 int SS = StackSlotForVirtReg[VirtReg]; 239 // Already has space allocated? 240 if (SS != -1) 241 return SS; 242 243 // Allocate a new stack object for this spill location... 244 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); 245 unsigned Size = TRI->getSpillSize(RC); 246 unsigned Align = TRI->getSpillAlignment(RC); 247 int FrameIdx = MFI->CreateSpillStackObject(Size, Align); 248 249 // Assign the slot. 250 StackSlotForVirtReg[VirtReg] = FrameIdx; 251 return FrameIdx; 252 } 253 254 /// Insert spill instruction for \p AssignedReg before \p Before. Update 255 /// DBG_VALUEs with \p VirtReg operands with the stack slot. 
256 void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg, 257 MCPhysReg AssignedReg, bool Kill) { 258 LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI) 259 << " in " << printReg(AssignedReg, TRI)); 260 int FI = getStackSpaceFor(VirtReg); 261 LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n'); 262 263 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); 264 TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI); 265 ++NumStores; 266 267 // If this register is used by DBG_VALUE then insert new DBG_VALUE to 268 // identify spilled location as the place to find corresponding variable's 269 // value. 270 SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg]; 271 for (MachineInstr *DBG : LRIDbgValues) { 272 MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI); 273 assert(NewDV->getParent() == MBB && "dangling parent pointer"); 274 (void)NewDV; 275 LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV); 276 } 277 // Now this register is spilled there is should not be any DBG_VALUE 278 // pointing to this register because they are all pointing to spilled value 279 // now. 280 LRIDbgValues.clear(); 281 } 282 283 /// Insert reload instruction for \p PhysReg before \p Before. 284 void RegAllocFast::reload(MachineBasicBlock::iterator Before, unsigned VirtReg, 285 MCPhysReg PhysReg) { 286 LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into " 287 << printReg(PhysReg, TRI) << '\n'); 288 int FI = getStackSpaceFor(VirtReg); 289 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); 290 TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI); 291 ++NumLoads; 292 } 293 294 /// Return true if MO is the only remaining reference to its virtual register, 295 /// and it is guaranteed to be a block-local register. 
296 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const { 297 // If the register has ever been spilled or reloaded, we conservatively assume 298 // it is a global register used in multiple blocks. 299 if (StackSlotForVirtReg[MO.getReg()] != -1) 300 return false; 301 302 // Check that the use/def chain has exactly one operand - MO. 303 MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg()); 304 if (&*I != &MO) 305 return false; 306 return ++I == MRI->reg_nodbg_end(); 307 } 308 309 /// Set kill flags on last use of a virtual register. 310 void RegAllocFast::addKillFlag(const LiveReg &LR) { 311 if (!LR.LastUse) return; 312 MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum); 313 if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) { 314 if (MO.getReg() == LR.PhysReg) 315 MO.setIsKill(); 316 // else, don't do anything we are problably redefining a 317 // subreg of this register and given we don't track which 318 // lanes are actually dead, we cannot insert a kill flag here. 319 // Otherwise we may end up in a situation like this: 320 // ... = (MO) physreg:sub1, implicit killed physreg 321 // ... <== Here we would allow later pass to reuse physreg:sub1 322 // which is potentially wrong. 323 // LR:sub0 = ... 324 // ... = LR.sub1 <== This is going to use physreg:sub1 325 } 326 } 327 328 /// Mark virtreg as no longer available. 329 void RegAllocFast::killVirtReg(LiveReg &LR) { 330 addKillFlag(LR); 331 assert(PhysRegState[LR.PhysReg] == LR.VirtReg && 332 "Broken RegState mapping"); 333 setPhysRegState(LR.PhysReg, regFree); 334 LR.PhysReg = 0; 335 } 336 337 /// Mark virtreg as no longer available. 
338 void RegAllocFast::killVirtReg(unsigned VirtReg) { 339 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 340 "killVirtReg needs a virtual register"); 341 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); 342 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) 343 killVirtReg(*LRI); 344 } 345 346 /// This method spills the value specified by VirtReg into the corresponding 347 /// stack slot if needed. 348 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, 349 unsigned VirtReg) { 350 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 351 "Spilling a physical register is illegal!"); 352 LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg); 353 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg && 354 "Spilling unmapped virtual register"); 355 spillVirtReg(MI, *LRI); 356 } 357 358 /// Do the actual work of spilling. 359 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) { 360 assert(PhysRegState[LR.PhysReg] == LR.VirtReg && "Broken RegState mapping"); 361 362 if (LR.Dirty) { 363 // If this physreg is used by the instruction, we want to kill it on the 364 // instruction, not on the spill. 365 bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI; 366 LR.Dirty = false; 367 368 spill(MI, LR.VirtReg, LR.PhysReg, SpillKill); 369 370 if (SpillKill) 371 LR.LastUse = nullptr; // Don't kill register again 372 } 373 killVirtReg(LR); 374 } 375 376 /// Spill all dirty virtregs without killing them. 377 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) { 378 if (LiveVirtRegs.empty()) 379 return; 380 // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order 381 // of spilling here is deterministic, if arbitrary. 382 for (LiveReg &LR : LiveVirtRegs) { 383 if (!LR.PhysReg) 384 continue; 385 spillVirtReg(MI, LR); 386 } 387 LiveVirtRegs.clear(); 388 } 389 390 /// Handle the direct use of a physical register. Check that the register is 391 /// not used by a virtreg. Kill the physreg, marking it free. 
This may add 392 /// implicit kills to MO->getParent() and invalidate MO. 393 void RegAllocFast::usePhysReg(MachineOperand &MO) { 394 // Ignore undef uses. 395 if (MO.isUndef()) 396 return; 397 398 unsigned PhysReg = MO.getReg(); 399 assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) && 400 "Bad usePhysReg operand"); 401 402 markRegUsedInInstr(PhysReg); 403 switch (PhysRegState[PhysReg]) { 404 case regDisabled: 405 break; 406 case regReserved: 407 PhysRegState[PhysReg] = regFree; 408 LLVM_FALLTHROUGH; 409 case regFree: 410 MO.setIsKill(); 411 return; 412 default: 413 // The physreg was allocated to a virtual register. That means the value we 414 // wanted has been clobbered. 415 llvm_unreachable("Instruction uses an allocated register"); 416 } 417 418 // Maybe a superregister is reserved? 419 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { 420 MCPhysReg Alias = *AI; 421 switch (PhysRegState[Alias]) { 422 case regDisabled: 423 break; 424 case regReserved: 425 // Either PhysReg is a subregister of Alias and we mark the 426 // whole register as free, or PhysReg is the superregister of 427 // Alias and we mark all the aliases as disabled before freeing 428 // PhysReg. 429 // In the latter case, since PhysReg was disabled, this means that 430 // its value is defined only by physical sub-registers. This check 431 // is performed by the assert of the default case in this loop. 432 // Note: The value of the superregister may only be partial 433 // defined, that is why regDisabled is a valid state for aliases. 434 assert((TRI->isSuperRegister(PhysReg, Alias) || 435 TRI->isSuperRegister(Alias, PhysReg)) && 436 "Instruction is not using a subregister of a reserved register"); 437 LLVM_FALLTHROUGH; 438 case regFree: 439 if (TRI->isSuperRegister(PhysReg, Alias)) { 440 // Leave the superregister in the working set. 
441 setPhysRegState(Alias, regFree); 442 MO.getParent()->addRegisterKilled(Alias, TRI, true); 443 return; 444 } 445 // Some other alias was in the working set - clear it. 446 setPhysRegState(Alias, regDisabled); 447 break; 448 default: 449 llvm_unreachable("Instruction uses an alias of an allocated register"); 450 } 451 } 452 453 // All aliases are disabled, bring register into working set. 454 setPhysRegState(PhysReg, regFree); 455 MO.setIsKill(); 456 } 457 458 /// Mark PhysReg as reserved or free after spilling any virtregs. This is very 459 /// similar to defineVirtReg except the physreg is reserved instead of 460 /// allocated. 461 void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI, 462 MCPhysReg PhysReg, RegState NewState) { 463 markRegUsedInInstr(PhysReg); 464 switch (unsigned VirtReg = PhysRegState[PhysReg]) { 465 case regDisabled: 466 break; 467 default: 468 spillVirtReg(MI, VirtReg); 469 LLVM_FALLTHROUGH; 470 case regFree: 471 case regReserved: 472 setPhysRegState(PhysReg, NewState); 473 return; 474 } 475 476 // This is a disabled register, disable all aliases. 477 setPhysRegState(PhysReg, NewState); 478 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { 479 MCPhysReg Alias = *AI; 480 switch (unsigned VirtReg = PhysRegState[Alias]) { 481 case regDisabled: 482 break; 483 default: 484 spillVirtReg(MI, VirtReg); 485 LLVM_FALLTHROUGH; 486 case regFree: 487 case regReserved: 488 setPhysRegState(Alias, regDisabled); 489 if (TRI->isSuperRegister(PhysReg, Alias)) 490 return; 491 break; 492 } 493 } 494 } 495 496 /// Return the cost of spilling clearing out PhysReg and aliases so it is free 497 /// for allocation. Returns 0 when PhysReg is free or disabled with all aliases 498 /// disabled - it can be allocated directly. 499 /// \returns spillImpossible when PhysReg or an alias can't be spilled. 
500 unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const { 501 if (isRegUsedInInstr(PhysReg)) { 502 LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) 503 << " is already used in instr.\n"); 504 return spillImpossible; 505 } 506 switch (unsigned VirtReg = PhysRegState[PhysReg]) { 507 case regDisabled: 508 break; 509 case regFree: 510 return 0; 511 case regReserved: 512 LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding " 513 << printReg(PhysReg, TRI) << " is reserved already.\n"); 514 return spillImpossible; 515 default: { 516 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg); 517 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg && 518 "Missing VirtReg entry"); 519 return LRI->Dirty ? spillDirty : spillClean; 520 } 521 } 522 523 // This is a disabled register, add up cost of aliases. 524 LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n"); 525 unsigned Cost = 0; 526 for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) { 527 MCPhysReg Alias = *AI; 528 switch (unsigned VirtReg = PhysRegState[Alias]) { 529 case regDisabled: 530 break; 531 case regFree: 532 ++Cost; 533 break; 534 case regReserved: 535 return spillImpossible; 536 default: { 537 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg); 538 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg && 539 "Missing VirtReg entry"); 540 Cost += LRI->Dirty ? spillDirty : spillClean; 541 break; 542 } 543 } 544 } 545 return Cost; 546 } 547 548 /// This method updates local state so that we know that PhysReg is the 549 /// proper container for VirtReg now. The physical register must not be used 550 /// for anything else when this is called. 
551 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) { 552 unsigned VirtReg = LR.VirtReg; 553 LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to " 554 << printReg(PhysReg, TRI) << '\n'); 555 assert(LR.PhysReg == 0 && "Already assigned a physreg"); 556 assert(PhysReg != 0 && "Trying to assign no register"); 557 LR.PhysReg = PhysReg; 558 setPhysRegState(PhysReg, VirtReg); 559 } 560 561 /// Allocates a physical register for VirtReg. 562 void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint) { 563 const unsigned VirtReg = LR.VirtReg; 564 565 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 566 "Can only allocate virtual registers"); 567 568 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); 569 LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg) 570 << " in class " << TRI->getRegClassName(&RC) 571 << " with hint " << printReg(Hint, TRI) << '\n'); 572 573 // Take hint when possible. 574 if (TargetRegisterInfo::isPhysicalRegister(Hint) && 575 MRI->isAllocatable(Hint) && RC.contains(Hint)) { 576 // Ignore the hint if we would have to spill a dirty register. 577 unsigned Cost = calcSpillCost(Hint); 578 if (Cost < spillDirty) { 579 if (Cost) 580 definePhysReg(MI, Hint, regFree); 581 assignVirtToPhysReg(LR, Hint); 582 return; 583 } 584 } 585 586 MCPhysReg BestReg = 0; 587 unsigned BestCost = spillImpossible; 588 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC); 589 for (MCPhysReg PhysReg : AllocationOrder) { 590 LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' '); 591 unsigned Cost = calcSpillCost(PhysReg); 592 LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n'); 593 // Immediate take a register with cost 0. 
594 if (Cost == 0) { 595 assignVirtToPhysReg(LR, PhysReg); 596 return; 597 } 598 if (Cost < BestCost) { 599 BestReg = PhysReg; 600 BestCost = Cost; 601 } 602 } 603 604 if (!BestReg) { 605 // Nothing we can do: Report an error and keep going with an invalid 606 // allocation. 607 if (MI.isInlineAsm()) 608 MI.emitError("inline assembly requires more registers than available"); 609 else 610 MI.emitError("ran out of registers during register allocation"); 611 definePhysReg(MI, *AllocationOrder.begin(), regFree); 612 assignVirtToPhysReg(LR, *AllocationOrder.begin()); 613 return; 614 } 615 616 definePhysReg(MI, BestReg, regFree); 617 assignVirtToPhysReg(LR, BestReg); 618 } 619 620 void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) { 621 assert(MO.isUndef() && "expected undef use"); 622 unsigned VirtReg = MO.getReg(); 623 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Expected virtreg"); 624 625 LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg); 626 MCPhysReg PhysReg; 627 if (LRI != LiveVirtRegs.end() && LRI->PhysReg) { 628 PhysReg = LRI->PhysReg; 629 } else { 630 const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg); 631 ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC); 632 assert(!AllocationOrder.empty() && "Allocation order must not be empty"); 633 PhysReg = AllocationOrder[0]; 634 } 635 636 unsigned SubRegIdx = MO.getSubReg(); 637 if (SubRegIdx != 0) { 638 PhysReg = TRI->getSubReg(PhysReg, SubRegIdx); 639 MO.setSubReg(0); 640 } 641 MO.setReg(PhysReg); 642 MO.setIsRenamable(true); 643 } 644 645 /// Allocates a register for VirtReg and mark it as dirty. 
646 MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum, 647 unsigned VirtReg, unsigned Hint) { 648 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 649 "Not a virtual register"); 650 LiveRegMap::iterator LRI; 651 bool New; 652 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); 653 if (!LRI->PhysReg) { 654 // If there is no hint, peek at the only use of this register. 655 if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) && 656 MRI->hasOneNonDBGUse(VirtReg)) { 657 const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg); 658 // It's a copy, use the destination register as a hint. 659 if (UseMI.isCopyLike()) 660 Hint = UseMI.getOperand(0).getReg(); 661 } 662 allocVirtReg(MI, *LRI, Hint); 663 } else if (LRI->LastUse) { 664 // Redefining a live register - kill at the last use, unless it is this 665 // instruction defining VirtReg multiple times. 666 if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse()) 667 addKillFlag(*LRI); 668 } 669 assert(LRI->PhysReg && "Register not assigned"); 670 LRI->LastUse = &MI; 671 LRI->LastOpNum = OpNum; 672 LRI->Dirty = true; 673 markRegUsedInInstr(LRI->PhysReg); 674 return LRI->PhysReg; 675 } 676 677 /// Make sure VirtReg is available in a physreg and return it. 
678 RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI, 679 unsigned OpNum, 680 unsigned VirtReg, 681 unsigned Hint) { 682 assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && 683 "Not a virtual register"); 684 LiveRegMap::iterator LRI; 685 bool New; 686 std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg)); 687 MachineOperand &MO = MI.getOperand(OpNum); 688 if (!LRI->PhysReg) { 689 allocVirtReg(MI, *LRI, Hint); 690 reload(MI, VirtReg, LRI->PhysReg); 691 } else if (LRI->Dirty) { 692 if (isLastUseOfLocalReg(MO)) { 693 LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n'); 694 if (MO.isUse()) 695 MO.setIsKill(); 696 else 697 MO.setIsDead(); 698 } else if (MO.isKill()) { 699 LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n'); 700 MO.setIsKill(false); 701 } else if (MO.isDead()) { 702 LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n'); 703 MO.setIsDead(false); 704 } 705 } else if (MO.isKill()) { 706 // We must remove kill flags from uses of reloaded registers because the 707 // register would be killed immediately, and there might be a second use: 708 // %foo = OR killed %x, %x 709 // This would cause a second reload of %x into a different register. 710 LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n'); 711 MO.setIsKill(false); 712 } else if (MO.isDead()) { 713 LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n'); 714 MO.setIsDead(false); 715 } 716 assert(LRI->PhysReg && "Register not assigned"); 717 LRI->LastUse = &MI; 718 LRI->LastOpNum = OpNum; 719 markRegUsedInInstr(LRI->PhysReg); 720 return *LRI; 721 } 722 723 /// Changes operand OpNum in MI the refer the PhysReg, considering subregs. This 724 /// may invalidate any operand pointers. Return true if the operand kills its 725 /// register. 
726 bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO, 727 MCPhysReg PhysReg) { 728 bool Dead = MO.isDead(); 729 if (!MO.getSubReg()) { 730 MO.setReg(PhysReg); 731 MO.setIsRenamable(true); 732 return MO.isKill() || Dead; 733 } 734 735 // Handle subregister index. 736 MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0); 737 MO.setIsRenamable(true); 738 MO.setSubReg(0); 739 740 // A kill flag implies killing the full register. Add corresponding super 741 // register kill. 742 if (MO.isKill()) { 743 MI.addRegisterKilled(PhysReg, TRI, true); 744 return true; 745 } 746 747 // A <def,read-undef> of a sub-register requires an implicit def of the full 748 // register. 749 if (MO.isDef() && MO.isUndef()) 750 MI.addRegisterDefined(PhysReg, TRI); 751 752 return Dead; 753 } 754 755 // Handles special instruction operand like early clobbers and tied ops when 756 // there are additional physreg defines. 757 void RegAllocFast::handleThroughOperands(MachineInstr &MI, 758 SmallVectorImpl<unsigned> &VirtDead) { 759 LLVM_DEBUG(dbgs() << "Scanning for through registers:"); 760 SmallSet<unsigned, 8> ThroughRegs; 761 for (const MachineOperand &MO : MI.operands()) { 762 if (!MO.isReg()) continue; 763 unsigned Reg = MO.getReg(); 764 if (!TargetRegisterInfo::isVirtualRegister(Reg)) 765 continue; 766 if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) || 767 (MO.getSubReg() && MI.readsVirtualRegister(Reg))) { 768 if (ThroughRegs.insert(Reg).second) 769 LLVM_DEBUG(dbgs() << ' ' << printReg(Reg)); 770 } 771 } 772 773 // If any physreg defines collide with preallocated through registers, 774 // we must spill and reallocate. 
775 LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n"); 776 for (const MachineOperand &MO : MI.operands()) { 777 if (!MO.isReg() || !MO.isDef()) continue; 778 unsigned Reg = MO.getReg(); 779 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 780 markRegUsedInInstr(Reg); 781 for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) { 782 if (ThroughRegs.count(PhysRegState[*AI])) 783 definePhysReg(MI, *AI, regFree); 784 } 785 } 786 787 SmallVector<unsigned, 8> PartialDefs; 788 LLVM_DEBUG(dbgs() << "Allocating tied uses.\n"); 789 for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) { 790 MachineOperand &MO = MI.getOperand(I); 791 if (!MO.isReg()) continue; 792 unsigned Reg = MO.getReg(); 793 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; 794 if (MO.isUse()) { 795 if (!MO.isTied()) continue; 796 LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO 797 << ") is tied to operand " << MI.findTiedOperandIdx(I) 798 << ".\n"); 799 LiveReg &LR = reloadVirtReg(MI, I, Reg, 0); 800 MCPhysReg PhysReg = LR.PhysReg; 801 setPhysReg(MI, MO, PhysReg); 802 // Note: we don't update the def operand yet. That would cause the normal 803 // def-scan to attempt spilling. 804 } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) { 805 LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n'); 806 // Reload the register, but don't assign to the operand just yet. 807 // That would confuse the later phys-def processing pass. 808 LiveReg &LR = reloadVirtReg(MI, I, Reg, 0); 809 PartialDefs.push_back(LR.PhysReg); 810 } 811 } 812 813 LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n"); 814 for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) { 815 const MachineOperand &MO = MI.getOperand(I); 816 if (!MO.isReg()) continue; 817 unsigned Reg = MO.getReg(); 818 if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue; 819 if (!MO.isEarlyClobber()) 820 continue; 821 // Note: defineVirtReg may invalidate MO. 
822 MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0); 823 if (setPhysReg(MI, MI.getOperand(I), PhysReg)) 824 VirtDead.push_back(Reg); 825 } 826 827 // Restore UsedInInstr to a state usable for allocating normal virtual uses. 828 UsedInInstr.clear(); 829 for (const MachineOperand &MO : MI.operands()) { 830 if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue; 831 unsigned Reg = MO.getReg(); 832 if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue; 833 LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI) 834 << " as used in instr\n"); 835 markRegUsedInInstr(Reg); 836 } 837 838 // Also mark PartialDefs as used to avoid reallocation. 839 for (unsigned PartialDef : PartialDefs) 840 markRegUsedInInstr(PartialDef); 841 } 842 843 #ifndef NDEBUG 844 void RegAllocFast::dumpState() { 845 for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) { 846 if (PhysRegState[Reg] == regDisabled) continue; 847 dbgs() << " " << printReg(Reg, TRI); 848 switch(PhysRegState[Reg]) { 849 case regFree: 850 break; 851 case regReserved: 852 dbgs() << "*"; 853 break; 854 default: { 855 dbgs() << '=' << printReg(PhysRegState[Reg]); 856 LiveRegMap::iterator LRI = findLiveVirtReg(PhysRegState[Reg]); 857 assert(LRI != LiveVirtRegs.end() && LRI->PhysReg && 858 "Missing VirtReg entry"); 859 if (LRI->Dirty) 860 dbgs() << "*"; 861 assert(LRI->PhysReg == Reg && "Bad inverse map"); 862 break; 863 } 864 } 865 } 866 dbgs() << '\n'; 867 // Check that LiveVirtRegs is the inverse. 
868 for (LiveRegMap::iterator i = LiveVirtRegs.begin(), 869 e = LiveVirtRegs.end(); i != e; ++i) { 870 if (!i->PhysReg) 871 continue; 872 assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) && 873 "Bad map key"); 874 assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) && 875 "Bad map value"); 876 assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map"); 877 } 878 } 879 #endif 880 881 void RegAllocFast::allocateInstruction(MachineInstr &MI) { 882 const MCInstrDesc &MCID = MI.getDesc(); 883 884 // If this is a copy, we may be able to coalesce. 885 unsigned CopySrcReg = 0; 886 unsigned CopyDstReg = 0; 887 unsigned CopySrcSub = 0; 888 unsigned CopyDstSub = 0; 889 if (MI.isCopy()) { 890 CopyDstReg = MI.getOperand(0).getReg(); 891 CopySrcReg = MI.getOperand(1).getReg(); 892 CopyDstSub = MI.getOperand(0).getSubReg(); 893 CopySrcSub = MI.getOperand(1).getSubReg(); 894 } 895 896 // Track registers used by instruction. 897 UsedInInstr.clear(); 898 899 // First scan. 900 // Mark physreg uses and early clobbers as used. 901 // Find the end of the virtreg operands 902 unsigned VirtOpEnd = 0; 903 bool hasTiedOps = false; 904 bool hasEarlyClobbers = false; 905 bool hasPartialRedefs = false; 906 bool hasPhysDefs = false; 907 for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) { 908 MachineOperand &MO = MI.getOperand(i); 909 // Make sure MRI knows about registers clobbered by regmasks. 
    if (MO.isRegMask()) {
      MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
      continue;
    }
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg) continue;
    if (TargetRegisterInfo::isVirtualRegister(Reg)) {
      // Remember the last operand index that refers to a virtual register so
      // the second scan can stop early.
      VirtOpEnd = i+1;
      if (MO.isUse()) {
        hasTiedOps = hasTiedOps ||
                            MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
      } else {
        if (MO.isEarlyClobber())
          hasEarlyClobbers = true;
        // A subregister def that also reads the register is a partial redef.
        if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
          hasPartialRedefs = true;
      }
      continue;
    }
    if (!MRI->isAllocatable(Reg)) continue;
    if (MO.isUse()) {
      usePhysReg(MO);
    } else if (MO.isEarlyClobber()) {
      // Implicit or dead early-clobber defs leave the register free again;
      // otherwise it stays reserved for the rest of the instruction.
      definePhysReg(MI, Reg,
                    (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
      hasEarlyClobbers = true;
    } else
      hasPhysDefs = true;
  }

  // The instruction may have virtual register operands that must be allocated
  // the same register at use-time and def-time: early clobbers and tied
  // operands. If there are also physical defs, these registers must avoid
  // both physical defs and uses, making them more constrained than normal
  // operands.
  // Similarly, if there are multiple defs and tied operands, we must make
  // sure the same register is allocated to uses and defs.
  // We didn't detect inline asm tied operands above, so just make this extra
  // pass for all inline asm.
  if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
      (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
    handleThroughOperands(MI, VirtDead);
    // Don't attempt coalescing when we have funny stuff going on.
    CopyDstReg = 0;
    // Pretend we have early clobbers so the use operands get marked below.
    // This is not necessary for the common case of a single tied use.
    hasEarlyClobbers = true;
  }

  // Second scan.
  // Allocate virtreg uses.
  bool HasUndefUse = false;
  for (unsigned I = 0; I != VirtOpEnd; ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      if (MO.isUndef()) {
        HasUndefUse = true;
        // There is no need to allocate a register for an undef use.
        continue;
      }
      // Reload the value if necessary, hinting the copy destination so an
      // identity copy can form.
      LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
      MCPhysReg PhysReg = LR.PhysReg;
      // Keep the coalescing candidate only while the source still matches.
      CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
      if (setPhysReg(MI, MO, PhysReg))
        killVirtReg(LR);
    }
  }

  // Allocate undef operands. This is a separate step because in a situation
  // like ` = OP undef %X, %X` both operands need to be assigned the same
  // register, so we should perform the normal assignment first.
  if (HasUndefUse) {
    for (MachineOperand &MO : MI.uses()) {
      if (!MO.isReg() || !MO.isUse())
        continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg))
        continue;

      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);
    }
  }

  // Track registers defined by instruction - early clobbers and tied uses at
  // this point.
  UsedInInstr.clear();
  if (hasEarlyClobbers) {
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      // Look for physreg defs and tied uses.
      if (!MO.isDef() && !MO.isTied()) continue;
      markRegUsedInInstr(Reg);
    }
  }

  unsigned DefOpEnd = MI.getNumOperands();
  if (MI.isCall()) {
    // Spill all virtregs before a call. This serves one purpose: If an
    // exception is thrown, the landing pad is going to expect to find
    // registers in their spill slots.
    // Note: although this is appealing to just consider all definitions
    // as call-clobbered, this is not correct because some of those
    // definitions may be used later on and we do not want to reuse
    // those for virtual registers in between.
    LLVM_DEBUG(dbgs() << " Spilling remaining registers before call.\n");
    spillAll(MI);
  }

  // Third scan.
  // Allocate defs and collect dead defs.
  for (unsigned I = 0; I != DefOpEnd; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    // Early clobbers were already allocated above; skip them here.
    if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
      continue;
    unsigned Reg = MO.getReg();

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      if (!MRI->isAllocatable(Reg)) continue;
      definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
      continue;
    }
    // Pass the copy source as a hint so the def may land in the same physreg.
    MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
    if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
      VirtDead.push_back(Reg);
      CopyDstReg = 0; // cancel coalescing.
    } else
      CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
  }

  // Kill dead defs after the scan to ensure that multiple defs of the same
  // register are allocated identically. We didn't need to do this for uses
  // because we are creating our own kill flags, and they are always at the
  // last use.
  for (unsigned VirtReg : VirtDead)
    killVirtReg(VirtReg);
  VirtDead.clear();

  LLVM_DEBUG(dbgs() << "<< " << MI);
  // If both sides of a COPY ended up in the same register (including equal
  // subregister indices), the copy is an identity and can be erased later.
  if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
    LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
    Coalesced.push_back(&MI);
  }
}

/// Rewrite the register operand of a DBG_VALUE to the virtual register's
/// current location (physical register or spill slot). DBG_VALUEs must never
/// affect codegen, so no register is ever allocated on their behalf.
void RegAllocFast::handleDebugValue(MachineInstr &MI) {
  MachineOperand &MO = MI.getOperand(0);

  // Ignore DBG_VALUEs that aren't based on virtual registers. These are
  // mostly constants and frame indices.
  if (!MO.isReg())
    return;
  unsigned Reg = MO.getReg();
  if (!TargetRegisterInfo::isVirtualRegister(Reg))
    return;

  // See if this virtual register has already been allocated to a physical
  // register or spilled to a stack slot.
  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    setPhysReg(MI, MO, LRI->PhysReg);
  } else {
    int SS = StackSlotForVirtReg[Reg];
    if (SS != -1) {
      // Modify DBG_VALUE now that the value is in a spill slot.
      updateDbgValueForSpill(MI, SS);
      LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
      return;
    }

    // We can't allocate a physreg for a DebugValue, sorry!
    // Clearing the register leaves the DBG_VALUE without a location.
    LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
    MO.setReg(0);
  }

  // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
  // that future spills of Reg will have DBG_VALUEs.
  LiveDbgValueMap[Reg].push_back(&MI);
}

/// Allocate registers for every instruction in a single basic block, then
/// spill all live virtual registers at the block's end and erase any copies
/// that were coalesced away.
void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);

  // Every physreg starts out disabled; live-ins enable theirs below.
  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB.begin();

  // Add live-in registers as live. They are marked reserved so they are not
  // handed out to virtual registers.
  for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);

  VirtDead.clear();
  Coalesced.clear();

  // Otherwise, sequentially allocate each instruction in the MBB.
  for (MachineInstr &MI : MBB) {
    LLVM_DEBUG(
      dbgs() << "\n>> " << MI << "Regs:";
      dumpState()
    );

    // Special handling for debug values. Note that they are not allowed to
    // affect codegen of the other instructions in any way.
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
      continue;
    }

    allocateInstruction(MI);
  }

  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB.getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCoalesced += Coalesced.size();

  LLVM_DEBUG(MBB.dump());
}

/// Entry point of the pass: allocate registers for the whole function, one
/// basic block at a time, then delete all virtual registers from MRI.
/// Always returns true since the function is rewritten in place.
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  // Reserved registers must be frozen before allocation decisions are made.
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  UsedInInstr.clear();
  // UsedInInstr is keyed by register units, not register numbers.
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

/// Factory used by the 'fast' RegisterRegAlloc registry entry above.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}