1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 /// \file This register allocator allocates registers one basic block at a
10 /// time, attempting to keep values in registers and reusing registers as
11 /// appropriate.
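/// Allocation is done in a single forward pass over each block; values that do
/// not fit in registers are spilled to stack slots that are created on demand.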
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/ADT/ArrayRef.h"
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/IndexedMap.h"
18 #include "llvm/ADT/SmallSet.h"
19 #include "llvm/ADT/SmallVector.h"
20 #include "llvm/ADT/SparseSet.h"
21 #include "llvm/ADT/Statistic.h"
22 #include "llvm/CodeGen/MachineBasicBlock.h"
23 #include "llvm/CodeGen/MachineFrameInfo.h"
24 #include "llvm/CodeGen/MachineFunction.h"
25 #include "llvm/CodeGen/MachineFunctionPass.h"
26 #include "llvm/CodeGen/MachineInstr.h"
27 #include "llvm/CodeGen/MachineInstrBuilder.h"
28 #include "llvm/CodeGen/MachineOperand.h"
29 #include "llvm/CodeGen/MachineRegisterInfo.h"
30 #include "llvm/CodeGen/RegAllocRegistry.h"
31 #include "llvm/CodeGen/RegisterClassInfo.h"
32 #include "llvm/CodeGen/TargetInstrInfo.h"
33 #include "llvm/CodeGen/TargetOpcodes.h"
34 #include "llvm/CodeGen/TargetRegisterInfo.h"
35 #include "llvm/CodeGen/TargetSubtargetInfo.h"
36 #include "llvm/IR/DebugLoc.h"
37 #include "llvm/IR/Metadata.h"
38 #include "llvm/MC/MCInstrDesc.h"
39 #include "llvm/MC/MCRegisterInfo.h"
40 #include "llvm/Pass.h"
41 #include "llvm/Support/Casting.h"
42 #include "llvm/Support/Compiler.h"
43 #include "llvm/Support/Debug.h"
44 #include "llvm/Support/ErrorHandling.h"
45 #include "llvm/Support/raw_ostream.h"
46 #include <cassert>
47 #include <tuple>
48 #include <vector>
49 
50 using namespace llvm;
51 
52 #define DEBUG_TYPE "regalloc"
53 
54 STATISTIC(NumStores, "Number of stores added");
55 STATISTIC(NumLoads, "Number of loads added");
56 STATISTIC(NumCoalesced, "Number of copies coalesced");
57 
58 static RegisterRegAlloc
59   fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
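// The fast allocator can be requested explicitly with "-regalloc=fast"; it is
// also the allocator normally used at -O0.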
60 
61 namespace {
62 
63   class RegAllocFast : public MachineFunctionPass {
64   public:
65     static char ID;
66 
67     RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}
68 
69   private:
70     MachineFrameInfo *MFI;
71     MachineRegisterInfo *MRI;
72     const TargetRegisterInfo *TRI;
73     const TargetInstrInfo *TII;
74     RegisterClassInfo RegClassInfo;
75 
76     /// Basic block currently being allocated.
77     MachineBasicBlock *MBB;
78 
79     /// Maps virtual regs to the frame index where these values are spilled.
80     IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
81 
82     /// Everything we know about a live virtual register.
83     struct LiveReg {
84       MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
85       unsigned VirtReg;                ///< Virtual register number.
86       MCPhysReg PhysReg = 0;           ///< Currently held here.
87       unsigned short LastOpNum = 0;    ///< OpNum on LastUse.
88       bool Dirty = false;              ///< Register needs spill.
89 
90       explicit LiveReg(unsigned VirtReg) : VirtReg(VirtReg) {}
91 
92       unsigned getSparseSetIndex() const {
93         return TargetRegisterInfo::virtReg2Index(VirtReg);
94       }
95     };
96 
97     using LiveRegMap = SparseSet<LiveReg>;
98     /// This map contains entries for each virtual register that is currently
99     /// available in a physical register.
100     LiveRegMap LiveVirtRegs;
101 
102     DenseMap<unsigned, SmallVector<MachineInstr *, 2>> LiveDbgValueMap;
103 
104     /// State of a physical register.
105     enum RegState {
106       /// A disabled register is not available for allocation, but an alias may
107       /// be in use. A register can only be moved out of the disabled state if
108       /// all aliases are disabled.
109       regDisabled,
110 
111       /// A free register is not currently in use and can be allocated
112       /// immediately without checking aliases.
113       regFree,
114 
115       /// A reserved register has been assigned explicitly (e.g., setting up a
116       /// call parameter), and it remains reserved until it is used.
117       regReserved
118 
119       /// A register state may also be a virtual register number, indicating
120       /// that the physical register is currently allocated to a virtual
121       /// register. In that case, LiveVirtRegs contains the inverse mapping.
122     };
123 
124     /// Maps each physical register to a RegState enum or a virtual register.
125     std::vector<unsigned> PhysRegState;
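    // Virtual register numbers always have their high bit set (see
    // TargetRegisterInfo::isVirtualRegister), so they can never collide with
    // the small RegState enum values stored in the same vector.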
126 
127     SmallVector<unsigned, 16> VirtDead;
128     SmallVector<MachineInstr *, 32> Coalesced;
129 
130     using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
131     /// Set of register units that are used in the current instruction, and so
132     /// cannot be allocated.
133     RegUnitSet UsedInInstr;
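    // Tracking register units rather than whole registers keeps this set small
    // and makes alias checks a simple membership test: two registers overlap
    // exactly when they share a register unit.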
134 
135     void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
136 
137     /// Mark a physreg as used in this instruction.
138     void markRegUsedInInstr(MCPhysReg PhysReg) {
139       for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
140         UsedInInstr.insert(*Units);
141     }
142 
143     /// Check if a physreg or any of its aliases are used in this instruction.
144     bool isRegUsedInInstr(MCPhysReg PhysReg) const {
145       for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
146         if (UsedInInstr.count(*Units))
147           return true;
148       return false;
149     }
150 
151     enum : unsigned {
152       spillClean = 50,
153       spillDirty = 100,
154       spillImpossible = ~0u
155     };
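    // Eviction heuristic: taking over a register holding a clean value (one
    // that already matches its stack slot) costs spillClean, a dirty value
    // costs spillDirty because it must be stored back first, and a reserved
    // register can never be taken (spillImpossible).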
156 
157   public:
158     StringRef getPassName() const override { return "Fast Register Allocator"; }
159 
160     void getAnalysisUsage(AnalysisUsage &AU) const override {
161       AU.setPreservesCFG();
162       MachineFunctionPass::getAnalysisUsage(AU);
163     }
164 
165     MachineFunctionProperties getRequiredProperties() const override {
166       return MachineFunctionProperties().set(
167           MachineFunctionProperties::Property::NoPHIs);
168     }
169 
170     MachineFunctionProperties getSetProperties() const override {
171       return MachineFunctionProperties().set(
172           MachineFunctionProperties::Property::NoVRegs);
173     }
174 
175   private:
176     bool runOnMachineFunction(MachineFunction &MF) override;
177 
178     void allocateBasicBlock(MachineBasicBlock &MBB);
179     void allocateInstruction(MachineInstr &MI);
180     void handleDebugValue(MachineInstr &MI);
181     void handleThroughOperands(MachineInstr &MI,
182                                SmallVectorImpl<unsigned> &VirtDead);
183     bool isLastUseOfLocalReg(const MachineOperand &MO) const;
184 
185     void addKillFlag(const LiveReg &LRI);
186     void killVirtReg(LiveReg &LR);
187     void killVirtReg(unsigned VirtReg);
188     void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR);
189     void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
190 
191     void usePhysReg(MachineOperand &MO);
192     void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
193                        RegState NewState);
194     unsigned calcSpillCost(MCPhysReg PhysReg) const;
195     void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);
196 
197     LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
198       return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
199     }
200 
201     LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
202       return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
203     }
204 
205     void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint);
206     MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
207                             unsigned Hint);
208     LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
209                            unsigned Hint);
210     void spillAll(MachineBasicBlock::iterator MI);
211     bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
212 
213     int getStackSpaceFor(unsigned VirtReg);
214     void spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
215                MCPhysReg AssignedReg, bool Kill);
216     void reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
217                 MCPhysReg PhysReg);
218 
219     void dumpState();
220   };
221 
222 } // end anonymous namespace
223 
224 char RegAllocFast::ID = 0;
225 
226 INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
227                 false)
228 
229 void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
230   PhysRegState[PhysReg] = NewState;
231 }
232 
233 /// This allocates space for the specified virtual register to be held on the
234 /// stack.
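/// The stack slot is created lazily on first use and reused for every later
/// spill and reload of the same virtual register.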
235 int RegAllocFast::getStackSpaceFor(unsigned VirtReg) {
236   // Find the location VirtReg would belong to.
237   int SS = StackSlotForVirtReg[VirtReg];
238   // Already has space allocated?
239   if (SS != -1)
240     return SS;
241 
242   // Allocate a new stack object for this spill location...
243   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
244   unsigned Size = TRI->getSpillSize(RC);
245   unsigned Align = TRI->getSpillAlignment(RC);
246   int FrameIdx = MFI->CreateSpillStackObject(Size, Align);
247 
248   // Assign the slot.
249   StackSlotForVirtReg[VirtReg] = FrameIdx;
250   return FrameIdx;
251 }
252 
253 /// Insert spill instruction for \p AssignedReg before \p Before. Update
254 /// DBG_VALUEs with \p VirtReg operands with the stack slot.
255 void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
256                          MCPhysReg AssignedReg, bool Kill) {
257   LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
258                     << " in " << printReg(AssignedReg, TRI));
259   int FI = getStackSpaceFor(VirtReg);
260   LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');
261 
262   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
263   TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI);
264   ++NumStores;
265 
266   // If this register is used by DBG_VALUEs, insert new DBG_VALUEs that
267   // identify the spilled location as the place to find the corresponding
268   // variable's value.
269   SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg];
270   for (MachineInstr *DBG : LRIDbgValues) {
271     MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI);
272     assert(NewDV->getParent() == MBB && "dangling parent pointer");
273     (void)NewDV;
274     LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
275   }
276   // Now that this register has been spilled, there should not be any
277   // DBG_VALUE pointing to this register; they all point to the spilled value
278   // now.
279   LRIDbgValues.clear();
280 }
281 
282 /// Insert reload instruction for \p PhysReg before \p Before.
283 void RegAllocFast::reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
284                           MCPhysReg PhysReg) {
285   LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
286                     << printReg(PhysReg, TRI) << '\n');
287   int FI = getStackSpaceFor(VirtReg);
288   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
289   TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI);
290   ++NumLoads;
291 }
292 
293 /// Return true if MO is the only remaining reference to its virtual register,
294 /// and it is guaranteed to be a block-local register.
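/// Such a register can simply be killed when it goes dead; it never needs to
/// be spilled because no other block can observe its value.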
295 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
296   // If the register has ever been spilled or reloaded, we conservatively assume
297   // it is a global register used in multiple blocks.
298   if (StackSlotForVirtReg[MO.getReg()] != -1)
299     return false;
300 
301   // Check that the use/def chain has exactly one operand - MO.
302   MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
303   if (&*I != &MO)
304     return false;
305   return ++I == MRI->reg_nodbg_end();
306 }
307 
308 /// Set kill flags on last use of a virtual register.
309 void RegAllocFast::addKillFlag(const LiveReg &LR) {
310   if (!LR.LastUse) return;
311   MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
312   if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
313     if (MO.getReg() == LR.PhysReg)
314       MO.setIsKill();
315     // Otherwise, don't do anything: we are probably redefining a
316     // subreg of this register, and since we don't track which
317     // lanes are actually dead, we cannot insert a kill flag here.
318     // Otherwise we may end up in a situation like this:
319     // ... = (MO) physreg:sub1, implicit killed physreg
320     // ... <== Here we would allow later pass to reuse physreg:sub1
321     //         which is potentially wrong.
322     // LR:sub0 = ...
323     // ... = LR.sub1 <== This is going to use physreg:sub1
324   }
325 }
326 
327 /// Mark virtreg as no longer available.
328 void RegAllocFast::killVirtReg(LiveReg &LR) {
329   addKillFlag(LR);
330   assert(PhysRegState[LR.PhysReg] == LR.VirtReg &&
331          "Broken RegState mapping");
332   setPhysRegState(LR.PhysReg, regFree);
333   LR.PhysReg = 0;
334 }
335 
336 /// Mark virtreg as no longer available.
337 void RegAllocFast::killVirtReg(unsigned VirtReg) {
338   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
339          "killVirtReg needs a virtual register");
340   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
341   if (LRI != LiveVirtRegs.end() && LRI->PhysReg)
342     killVirtReg(*LRI);
343 }
344 
345 /// This method spills the value specified by VirtReg into the corresponding
346 /// stack slot if needed.
347 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
348                                 unsigned VirtReg) {
349   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
350          "Spilling a physical register is illegal!");
351   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
352   assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
353          "Spilling unmapped virtual register");
354   spillVirtReg(MI, *LRI);
355 }
356 
357 /// Do the actual work of spilling.
358 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) {
359   assert(PhysRegState[LR.PhysReg] == LR.VirtReg && "Broken RegState mapping");
360 
361   if (LR.Dirty) {
362     // If this physreg is used by the instruction, we want to kill it on the
363     // instruction, not on the spill.
364     bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
365     LR.Dirty = false;
366 
367     spill(MI, LR.VirtReg, LR.PhysReg, SpillKill);
368 
369     if (SpillKill)
370       LR.LastUse = nullptr; // Don't kill register again
371   }
372   killVirtReg(LR);
373 }
374 
375 /// Spill all dirty virtregs without killing them.
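/// This is used before calls and at the end of a block, where every live value
/// must reside in its stack slot.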
376 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) {
377   if (LiveVirtRegs.empty())
378     return;
379   // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
380   // of spilling here is deterministic, if arbitrary.
381   for (LiveReg &LR : LiveVirtRegs) {
382     if (!LR.PhysReg)
383       continue;
384     spillVirtReg(MI, LR);
385   }
386   LiveVirtRegs.clear();
387 }
388 
389 /// Handle the direct use of a physical register.  Check that the register is
390 /// not used by a virtreg. Kill the physreg, marking it free. This may add
391 /// implicit kills to MO->getParent() and invalidate MO.
392 void RegAllocFast::usePhysReg(MachineOperand &MO) {
393   // Ignore undef uses.
394   if (MO.isUndef())
395     return;
396 
397   unsigned PhysReg = MO.getReg();
398   assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
399          "Bad usePhysReg operand");
400 
401   markRegUsedInInstr(PhysReg);
402   switch (PhysRegState[PhysReg]) {
403   case regDisabled:
404     break;
405   case regReserved:
406     PhysRegState[PhysReg] = regFree;
407     LLVM_FALLTHROUGH;
408   case regFree:
409     MO.setIsKill();
410     return;
411   default:
412     // The physreg was allocated to a virtual register. That means the value we
413     // wanted has been clobbered.
414     llvm_unreachable("Instruction uses an allocated register");
415   }
416 
417   // Maybe a superregister is reserved?
418   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
419     MCPhysReg Alias = *AI;
420     switch (PhysRegState[Alias]) {
421     case regDisabled:
422       break;
423     case regReserved:
424       // Either PhysReg is a subregister of Alias and we mark the
425       // whole register as free, or PhysReg is the superregister of
426       // Alias and we mark all the aliases as disabled before freeing
427       // PhysReg.
428       // In the latter case, since PhysReg was disabled, this means that
429       // its value is defined only by physical sub-registers. This check
430       // is performed by the assert of the default case in this loop.
431       // Note: The value of the superregister may only be partially
432       // defined; that is why regDisabled is a valid state for aliases.
433       assert((TRI->isSuperRegister(PhysReg, Alias) ||
434               TRI->isSuperRegister(Alias, PhysReg)) &&
435              "Instruction is not using a subregister of a reserved register");
436       LLVM_FALLTHROUGH;
437     case regFree:
438       if (TRI->isSuperRegister(PhysReg, Alias)) {
439         // Leave the superregister in the working set.
440         setPhysRegState(Alias, regFree);
441         MO.getParent()->addRegisterKilled(Alias, TRI, true);
442         return;
443       }
444       // Some other alias was in the working set - clear it.
445       setPhysRegState(Alias, regDisabled);
446       break;
447     default:
448       llvm_unreachable("Instruction uses an alias of an allocated register");
449     }
450   }
451 
452   // All aliases are disabled, bring register into working set.
453   setPhysRegState(PhysReg, regFree);
454   MO.setIsKill();
455 }
456 
457 /// Mark PhysReg as reserved or free after spilling any virtregs. This is very
458 /// similar to defineVirtReg except the physreg is reserved instead of
459 /// allocated.
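/// Any virtual register currently held in PhysReg or one of its aliases is
/// spilled first.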
460 void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
461                                  MCPhysReg PhysReg, RegState NewState) {
462   markRegUsedInInstr(PhysReg);
463   switch (unsigned VirtReg = PhysRegState[PhysReg]) {
464   case regDisabled:
465     break;
466   default:
467     spillVirtReg(MI, VirtReg);
468     LLVM_FALLTHROUGH;
469   case regFree:
470   case regReserved:
471     setPhysRegState(PhysReg, NewState);
472     return;
473   }
474 
475   // This is a disabled register, disable all aliases.
476   setPhysRegState(PhysReg, NewState);
477   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
478     MCPhysReg Alias = *AI;
479     switch (unsigned VirtReg = PhysRegState[Alias]) {
480     case regDisabled:
481       break;
482     default:
483       spillVirtReg(MI, VirtReg);
484       LLVM_FALLTHROUGH;
485     case regFree:
486     case regReserved:
487       setPhysRegState(Alias, regDisabled);
488       if (TRI->isSuperRegister(PhysReg, Alias))
489         return;
490       break;
491     }
492   }
493 }
494 
495 /// Return the cost of spilling and clearing out PhysReg and its aliases so
496 /// that it is free for allocation. Returns 0 when PhysReg is free or disabled
497 /// with all aliases disabled - it can be allocated directly.
498 /// \returns spillImpossible when PhysReg or an alias can't be spilled.
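/// For a disabled register, the costs of all live aliases are summed.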
499 unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
500   if (isRegUsedInInstr(PhysReg)) {
501     LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
502                       << " is already used in instr.\n");
503     return spillImpossible;
504   }
505   switch (unsigned VirtReg = PhysRegState[PhysReg]) {
506   case regDisabled:
507     break;
508   case regFree:
509     return 0;
510   case regReserved:
511     LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
512                       << printReg(PhysReg, TRI) << " is reserved already.\n");
513     return spillImpossible;
514   default: {
515     LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
516     assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
517            "Missing VirtReg entry");
518     return LRI->Dirty ? spillDirty : spillClean;
519   }
520   }
521 
522   // This is a disabled register, add up cost of aliases.
523   LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
524   unsigned Cost = 0;
525   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
526     MCPhysReg Alias = *AI;
527     switch (unsigned VirtReg = PhysRegState[Alias]) {
528     case regDisabled:
529       break;
530     case regFree:
531       ++Cost;
532       break;
533     case regReserved:
534       return spillImpossible;
535     default: {
536       LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
537       assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
538              "Missing VirtReg entry");
539       Cost += LRI->Dirty ? spillDirty : spillClean;
540       break;
541     }
542     }
543   }
544   return Cost;
545 }
546 
547 /// This method updates local state so that we know that PhysReg is the
548 /// proper container for VirtReg now.  The physical register must not be used
549 /// for anything else when this is called.
550 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
551   unsigned VirtReg = LR.VirtReg;
552   LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
553                     << printReg(PhysReg, TRI) << '\n');
554   assert(LR.PhysReg == 0 && "Already assigned a physreg");
555   assert(PhysReg != 0 && "Trying to assign no register");
556   LR.PhysReg = PhysReg;
557   setPhysRegState(PhysReg, VirtReg);
558 }
559 
560 /// Allocates a physical register for VirtReg.
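/// Preference order: the hint (when eviction is cheap), then the first
/// completely free register in allocation order, then the register that is
/// cheapest to evict.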
561 void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint) {
562   const unsigned VirtReg = LR.VirtReg;
563 
564   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
565          "Can only allocate virtual registers");
566 
567   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
568   LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
569                     << " in class " << TRI->getRegClassName(&RC)
570                     << " with hint " << printReg(Hint, TRI) << '\n');
571 
572   // Take hint when possible.
573   if (TargetRegisterInfo::isPhysicalRegister(Hint) &&
574       MRI->isAllocatable(Hint) && RC.contains(Hint)) {
575     // Ignore the hint if we would have to spill a dirty register.
576     unsigned Cost = calcSpillCost(Hint);
577     if (Cost < spillDirty) {
578       if (Cost)
579         definePhysReg(MI, Hint, regFree);
580       assignVirtToPhysReg(LR, Hint);
581       return;
582     }
583   }
584 
585   // First try to find a completely free register.
586   ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
587   for (MCPhysReg PhysReg : AllocationOrder) {
588     if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
589       assignVirtToPhysReg(LR, PhysReg);
590       return;
591     }
592   }
593 
594   MCPhysReg BestReg = 0;
595   unsigned BestCost = spillImpossible;
596   for (MCPhysReg PhysReg : AllocationOrder) {
597     LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
598     unsigned Cost = calcSpillCost(PhysReg);
599     LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
600     // Immediately take a register with cost 0.
601     if (Cost == 0) {
602       assignVirtToPhysReg(LR, PhysReg);
603       return;
604     }
605     if (Cost < BestCost) {
606       BestReg = PhysReg;
607       BestCost = Cost;
608     }
609   }
610 
611   if (!BestReg) {
612     // Nothing we can do: Report an error and keep going with an invalid
613     // allocation.
614     if (MI.isInlineAsm())
615       MI.emitError("inline assembly requires more registers than available");
616     else
617       MI.emitError("ran out of registers during register allocation");
618     definePhysReg(MI, *AllocationOrder.begin(), regFree);
619     assignVirtToPhysReg(LR, *AllocationOrder.begin());
620     return;
621   }
622 
623   definePhysReg(MI, BestReg, regFree);
624   assignVirtToPhysReg(LR, BestReg);
625 }
626 
627 /// Allocate a register for VirtReg and mark it as dirty.
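/// Dirty means the value lives only in the physical register and must be
/// stored back to its stack slot before that register can be reused.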
628 MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
629                                       unsigned VirtReg, unsigned Hint) {
630   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
631          "Not a virtual register");
632   LiveRegMap::iterator LRI;
633   bool New;
634   std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
635   if (!LRI->PhysReg) {
636     // If there is no hint, peek at the only use of this register.
637     if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
638         MRI->hasOneNonDBGUse(VirtReg)) {
639       const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
640       // It's a copy, use the destination register as a hint.
641       if (UseMI.isCopyLike())
642         Hint = UseMI.getOperand(0).getReg();
643     }
644     allocVirtReg(MI, *LRI, Hint);
645   } else if (LRI->LastUse) {
646     // Redefining a live register - kill at the last use, unless it is this
647     // instruction defining VirtReg multiple times.
648     if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
649       addKillFlag(*LRI);
650   }
651   assert(LRI->PhysReg && "Register not assigned");
652   LRI->LastUse = &MI;
653   LRI->LastOpNum = OpNum;
654   LRI->Dirty = true;
655   markRegUsedInInstr(LRI->PhysReg);
656   return LRI->PhysReg;
657 }
658 
659 /// Make sure VirtReg is available in a physreg and return it.
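/// A reload from the stack slot is inserted before \p MI when the value is not
/// already live in a register.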
660 RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI,
661                                                    unsigned OpNum,
662                                                    unsigned VirtReg,
663                                                    unsigned Hint) {
664   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
665          "Not a virtual register");
666   LiveRegMap::iterator LRI;
667   bool New;
668   std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
669   MachineOperand &MO = MI.getOperand(OpNum);
670   if (!LRI->PhysReg) {
671     allocVirtReg(MI, *LRI, Hint);
672     reload(MI, VirtReg, LRI->PhysReg);
673   } else if (LRI->Dirty) {
674     if (isLastUseOfLocalReg(MO)) {
675       LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n');
676       if (MO.isUse())
677         MO.setIsKill();
678       else
679         MO.setIsDead();
680     } else if (MO.isKill()) {
681       LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n');
682       MO.setIsKill(false);
683     } else if (MO.isDead()) {
684       LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n');
685       MO.setIsDead(false);
686     }
687   } else if (MO.isKill()) {
688     // We must remove kill flags from uses of reloaded registers because the
689     // register would be killed immediately, and there might be a second use:
690     //   %foo = OR killed %x, %x
691     // This would cause a second reload of %x into a different register.
692     LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n');
693     MO.setIsKill(false);
694   } else if (MO.isDead()) {
695     LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n');
696     MO.setIsDead(false);
697   }
698   assert(LRI->PhysReg && "Register not assigned");
699   LRI->LastUse = &MI;
700   LRI->LastOpNum = OpNum;
701   markRegUsedInInstr(LRI->PhysReg);
702   return *LRI;
703 }
704 
705 /// Change operand OpNum in MI to refer to PhysReg, taking subregisters into
706 /// account. This may invalidate any operand pointers. Return true if the
707 /// operand kills its register.
708 bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
709                               MCPhysReg PhysReg) {
710   bool Dead = MO.isDead();
711   if (!MO.getSubReg()) {
712     MO.setReg(PhysReg);
713     MO.setIsRenamable(true);
714     return MO.isKill() || Dead;
715   }
716 
717   // Handle subregister index.
718   MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
719   MO.setIsRenamable(true);
720   MO.setSubReg(0);
721 
722   // A kill flag implies killing the full register. Add corresponding super
723   // register kill.
724   if (MO.isKill()) {
725     MI.addRegisterKilled(PhysReg, TRI, true);
726     return true;
727   }
728 
729   // A <def,read-undef> of a sub-register requires an implicit def of the full
730   // register.
731   if (MO.isDef() && MO.isUndef())
732     MI.addRegisterDefined(PhysReg, TRI);
733 
734   return Dead;
735 }
736 
737 // Handle special instruction operands like early clobbers and tied ops when
738 // there are additional physreg defines.
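// A "through" register is a virtual register that must stay in the same
// physical register across the instruction (tied uses, early clobbers and
// partial redefinitions), so it must not collide with any physreg def.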
739 void RegAllocFast::handleThroughOperands(MachineInstr &MI,
740                                          SmallVectorImpl<unsigned> &VirtDead) {
741   LLVM_DEBUG(dbgs() << "Scanning for through registers:");
742   SmallSet<unsigned, 8> ThroughRegs;
743   for (const MachineOperand &MO : MI.operands()) {
744     if (!MO.isReg()) continue;
745     unsigned Reg = MO.getReg();
746     if (!TargetRegisterInfo::isVirtualRegister(Reg))
747       continue;
748     if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
749         (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
750       if (ThroughRegs.insert(Reg).second)
751         LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
752     }
753   }
754 
755   // If any physreg defines collide with preallocated through registers,
756   // we must spill and reallocate.
757   LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
758   for (const MachineOperand &MO : MI.operands()) {
759     if (!MO.isReg() || !MO.isDef()) continue;
760     unsigned Reg = MO.getReg();
761     if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
762     markRegUsedInInstr(Reg);
763     for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
764       if (ThroughRegs.count(PhysRegState[*AI]))
765         definePhysReg(MI, *AI, regFree);
766     }
767   }
768 
769   SmallVector<unsigned, 8> PartialDefs;
770   LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
771   for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
772     MachineOperand &MO = MI.getOperand(I);
773     if (!MO.isReg()) continue;
774     unsigned Reg = MO.getReg();
775     if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
776     if (MO.isUse()) {
777       if (!MO.isTied()) continue;
778       LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
779                         << ") is tied to operand " << MI.findTiedOperandIdx(I)
780                         << ".\n");
781       LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
782       MCPhysReg PhysReg = LR.PhysReg;
783       setPhysReg(MI, MO, PhysReg);
784       // Note: we don't update the def operand yet. That would cause the normal
785       // def-scan to attempt spilling.
786     } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
787       LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n');
788       // Reload the register, but don't assign to the operand just yet.
789       // That would confuse the later phys-def processing pass.
790       LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
791       PartialDefs.push_back(LR.PhysReg);
792     }
793   }
794 
795   LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
796   for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
797     const MachineOperand &MO = MI.getOperand(I);
798     if (!MO.isReg()) continue;
799     unsigned Reg = MO.getReg();
800     if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
801     if (!MO.isEarlyClobber())
802       continue;
803     // Note: defineVirtReg may invalidate MO.
804     MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0);
805     if (setPhysReg(MI, MI.getOperand(I), PhysReg))
806       VirtDead.push_back(Reg);
807   }
808 
809   // Restore UsedInInstr to a state usable for allocating normal virtual uses.
810   UsedInInstr.clear();
811   for (const MachineOperand &MO : MI.operands()) {
812     if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
813     unsigned Reg = MO.getReg();
814     if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
815     LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
816                       << " as used in instr\n");
817     markRegUsedInInstr(Reg);
818   }
819 
820   // Also mark PartialDefs as used to avoid reallocation.
821   for (unsigned PartialDef : PartialDefs)
822     markRegUsedInInstr(PartialDef);
823 }
824 
825 #ifndef NDEBUG
826 void RegAllocFast::dumpState() {
827   for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
828     if (PhysRegState[Reg] == regDisabled) continue;
829     dbgs() << " " << printReg(Reg, TRI);
830     switch(PhysRegState[Reg]) {
831     case regFree:
832       break;
833     case regReserved:
834       dbgs() << "*";
835       break;
836     default: {
837       dbgs() << '=' << printReg(PhysRegState[Reg]);
838       LiveRegMap::iterator LRI = findLiveVirtReg(PhysRegState[Reg]);
839       assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
840              "Missing VirtReg entry");
841       if (LRI->Dirty)
842         dbgs() << "*";
843       assert(LRI->PhysReg == Reg && "Bad inverse map");
844       break;
845     }
846     }
847   }
848   dbgs() << '\n';
849   // Check that LiveVirtRegs is the inverse.
850   for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
851        e = LiveVirtRegs.end(); i != e; ++i) {
852     if (!i->PhysReg)
853       continue;
854     assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
855            "Bad map key");
856     assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
857            "Bad map value");
858     assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
859   }
860 }
861 #endif
862 
863 void RegAllocFast::allocateInstruction(MachineInstr &MI) {
864   const MCInstrDesc &MCID = MI.getDesc();
865 
866   // If this is a copy, we may be able to coalesce.
867   unsigned CopySrcReg = 0;
868   unsigned CopyDstReg = 0;
869   unsigned CopySrcSub = 0;
870   unsigned CopyDstSub = 0;
871   if (MI.isCopy()) {
872     CopyDstReg = MI.getOperand(0).getReg();
873     CopySrcReg = MI.getOperand(1).getReg();
874     CopyDstSub = MI.getOperand(0).getSubReg();
875     CopySrcSub = MI.getOperand(1).getSubReg();
876   }
877 
878   // Track registers used by instruction.
879   UsedInInstr.clear();
880 
881   // First scan.
882   // Mark physreg uses and early clobbers as used.
883   // Find the end of the virtreg operands.
884   unsigned VirtOpEnd = 0;
885   bool hasTiedOps = false;
886   bool hasEarlyClobbers = false;
887   bool hasPartialRedefs = false;
888   bool hasPhysDefs = false;
889   for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
890     MachineOperand &MO = MI.getOperand(i);
891     // Make sure MRI knows about registers clobbered by regmasks.
892     if (MO.isRegMask()) {
893       MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
894       continue;
895     }
896     if (!MO.isReg()) continue;
897     unsigned Reg = MO.getReg();
898     if (!Reg) continue;
899     if (TargetRegisterInfo::isVirtualRegister(Reg)) {
900       VirtOpEnd = i+1;
901       if (MO.isUse()) {
902         hasTiedOps = hasTiedOps ||
903                             MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
904       } else {
905         if (MO.isEarlyClobber())
906           hasEarlyClobbers = true;
907         if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
908           hasPartialRedefs = true;
909       }
910       continue;
911     }
912     if (!MRI->isAllocatable(Reg)) continue;
913     if (MO.isUse()) {
914       usePhysReg(MO);
915     } else if (MO.isEarlyClobber()) {
916       definePhysReg(MI, Reg,
917                     (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
918       hasEarlyClobbers = true;
919     } else
920       hasPhysDefs = true;
921   }
922 
923   // The instruction may have virtual register operands that must be allocated
924   // the same register at use-time and def-time: early clobbers and tied
925   // operands. If there are also physical defs, these registers must avoid
926   // both physical defs and uses, making them more constrained than normal
927   // operands.
928   // Similarly, if there are multiple defs and tied operands, we must make
929   // sure the same register is allocated to uses and defs.
930   // We didn't detect inline asm tied operands above, so just make this extra
931   // pass for all inline asm.
932   if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
933       (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
934     handleThroughOperands(MI, VirtDead);
935     // Don't attempt coalescing when we have funny stuff going on.
936     CopyDstReg = 0;
937     // Pretend we have early clobbers so the use operands get marked below.
938     // This is not necessary for the common case of a single tied use.
939     hasEarlyClobbers = true;
940   }
941 
942   // Second scan.
943   // Allocate virtreg uses.
944   for (unsigned I = 0; I != VirtOpEnd; ++I) {
945     MachineOperand &MO = MI.getOperand(I);
946     if (!MO.isReg()) continue;
947     unsigned Reg = MO.getReg();
948     if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
949     if (MO.isUse()) {
950       LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
951       MCPhysReg PhysReg = LR.PhysReg;
952       CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
953       if (setPhysReg(MI, MO, PhysReg))
954         killVirtReg(LR);
955     }
956   }
957 
958   // Track registers defined by instruction - early clobbers and tied uses at
959   // this point.
960   UsedInInstr.clear();
961   if (hasEarlyClobbers) {
962     for (const MachineOperand &MO : MI.operands()) {
963       if (!MO.isReg()) continue;
964       unsigned Reg = MO.getReg();
965       if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
966       // Look for physreg defs and tied uses.
967       if (!MO.isDef() && !MO.isTied()) continue;
968       markRegUsedInInstr(Reg);
969     }
970   }
971 
972   unsigned DefOpEnd = MI.getNumOperands();
973   if (MI.isCall()) {
974     // Spill all virtregs before a call. This serves one purpose: If an
975     // exception is thrown, the landing pad is going to expect to find
976     // registers in their spill slots.
977     // Note: although it is appealing to just consider all definitions
978     // as call-clobbered, this is not correct because some of those
979     // definitions may be used later on, and we do not want to reuse
980     // those registers for virtual registers in between.
981     LLVM_DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
982     spillAll(MI);
983   }
984 
985   // Third scan.
986   // Allocate defs and collect dead defs.
987   for (unsigned I = 0; I != DefOpEnd; ++I) {
988     const MachineOperand &MO = MI.getOperand(I);
989     if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
990       continue;
991     unsigned Reg = MO.getReg();
992 
993     if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
994       if (!MRI->isAllocatable(Reg)) continue;
995       definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
996       continue;
997     }
998     MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
999     if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
1000       VirtDead.push_back(Reg);
1001       CopyDstReg = 0; // Cancel coalescing.
1002     } else
1003       CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
1004   }
1005 
1006   // Kill dead defs after the scan to ensure that multiple defs of the same
1007   // register are allocated identically. We didn't need to do this for uses
1008   // because we are creating our own kill flags, and they are always at the
1009   // last use.
1010   for (unsigned VirtReg : VirtDead)
1011     killVirtReg(VirtReg);
1012   VirtDead.clear();
1013 
1014   LLVM_DEBUG(dbgs() << "<< " << MI);
1015   if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
1016     LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
1017     Coalesced.push_back(&MI);
1018   }
1019 }
1020 
1021 void RegAllocFast::handleDebugValue(MachineInstr &MI) {
1022   MachineOperand &MO = MI.getOperand(0);
1023 
1024   // Ignore DBG_VALUEs that aren't based on virtual registers. These are
1025   // mostly constants and frame indices.
1026   if (!MO.isReg())
1027     return;
1028   unsigned Reg = MO.getReg();
1029   if (!TargetRegisterInfo::isVirtualRegister(Reg))
1030     return;
1031 
1032   // See if this virtual register has already been allocated to a physical
1033   // register or spilled to a stack slot.
1034   LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1035   if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1036     setPhysReg(MI, MO, LRI->PhysReg);
1037   } else {
1038     int SS = StackSlotForVirtReg[Reg];
1039     if (SS != -1) {
1040       // Modify DBG_VALUE now that the value is in a spill slot.
1041       updateDbgValueForSpill(MI, SS);
1042       LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
1043       return;
1044     }
1045 
1046     // We can't allocate a physreg for a DebugValue, sorry!
1047     LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
1048     MO.setReg(0);
1049   }
1050 
1051   // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
1052   // that future spills of Reg will have DBG_VALUEs.
1053   LiveDbgValueMap[Reg].push_back(&MI);
1054 }
1055 
1056 void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
1057   this->MBB = &MBB;
1058   LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);
1059 
1060   PhysRegState.assign(TRI->getNumRegs(), regDisabled);
1061   assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
1062 
1063   MachineBasicBlock::iterator MII = MBB.begin();
1064 
1065   // Add live-in registers as live.
1066   for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
1067     if (MRI->isAllocatable(LI.PhysReg))
1068       definePhysReg(MII, LI.PhysReg, regReserved);
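  // Live-ins are marked reserved so they are not handed out to virtual
  // registers before their first real use.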
1069 
1070   VirtDead.clear();
1071   Coalesced.clear();
1072 
1073   // Sequentially allocate each instruction in the MBB.
1074   for (MachineInstr &MI : MBB) {
1075     LLVM_DEBUG(
1076       dbgs() << "\n>> " << MI << "Regs:";
1077       dumpState()
1078     );
1079 
1080     // Special handling for debug values. Note that they are not allowed to
1081     // affect codegen of the other instructions in any way.
1082     if (MI.isDebugValue()) {
1083       handleDebugValue(MI);
1084       continue;
1085     }
1086 
1087     allocateInstruction(MI);
1088   }
1089 
1090   // Spill all physical registers holding virtual registers now.
1091   LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1092   spillAll(MBB.getFirstTerminator());
1093 
1094   // Erase all the coalesced copies. We are delaying it until now because
1095   // LiveVirtRegs might refer to the instrs.
1096   for (MachineInstr *MI : Coalesced)
1097     MBB.erase(MI);
1098   NumCoalesced += Coalesced.size();
1099 
1100   LLVM_DEBUG(MBB.dump());
1101 }
1102 
1103 bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
1104   LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1105                     << "********** Function: " << MF.getName() << '\n');
1106   MRI = &MF.getRegInfo();
1107   const TargetSubtargetInfo &STI = MF.getSubtarget();
1108   TRI = STI.getRegisterInfo();
1109   TII = STI.getInstrInfo();
1110   MFI = &MF.getFrameInfo();
1111   MRI->freezeReservedRegs(MF);
1112   RegClassInfo.runOnMachineFunction(MF);
1113   UsedInInstr.clear();
1114   UsedInInstr.setUniverse(TRI->getNumRegUnits());
1115 
1116   // Initialize the virtual->physical register map to have a 'null'
1117   // mapping for all virtual registers.
1118   unsigned NumVirtRegs = MRI->getNumVirtRegs();
1119   StackSlotForVirtReg.resize(NumVirtRegs);
1120   LiveVirtRegs.setUniverse(NumVirtRegs);
1121 
1122   // Loop over all of the basic blocks, eliminating virtual register references
1123   for (MachineBasicBlock &MBB : MF)
1124     allocateBasicBlock(MBB);
1125 
1126   // All machine operands and other references to virtual registers have been
1127   // replaced. Remove the virtual registers.
1128   MRI->clearVirtRegs();
1129 
1130   StackSlotForVirtReg.clear();
1131   LiveDbgValueMap.clear();
1132   return true;
1133 }
1134 
1135 FunctionPass *llvm::createFastRegisterAllocator() {
1136   return new RegAllocFast();
1137 }
1138