//===- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file This register allocator allocates registers one basic block at a
/// time, attempting to keep values in registers and reusing registers as
/// appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/Metadata.h"
#include "llvm/MC/MCInstrDesc.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <tuple>
#include <vector>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads, "Number of loads added");
STATISTIC(NumCoalesced, "Number of copies coalesced");

static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {

  class RegAllocFast : public MachineFunctionPass {
  public:
    static char ID;

    RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}

  private:
    MachineFrameInfo *MFI;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    /// Basic block currently being allocated.
    MachineBasicBlock *MBB;

    /// Maps virtual regs to the frame index where these values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    /// Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
      unsigned VirtReg;                ///< Virtual register number.
      MCPhysReg PhysReg = 0;           ///< Currently held here.
      unsigned short LastOpNum = 0;    ///< OpNum on LastUse.
      bool Dirty = false;              ///< Register needs spill.

      explicit LiveReg(unsigned VirtReg) : VirtReg(VirtReg) {}

      unsigned getSparseSetIndex() const {
        return TargetRegisterInfo::virtReg2Index(VirtReg);
      }
    };

    using LiveRegMap = SparseSet<LiveReg>;
    /// This map contains entries for each virtual register that is currently
    /// available in a physical register.
    LiveRegMap LiveVirtRegs;

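    /// Maps each virtual register to the DBG_VALUE instructions that refer to
    /// it, so that a later spill can emit matching frame-index DBG_VALUEs.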
    DenseMap<unsigned, SmallVector<MachineInstr *, 2>> LiveDbgValueMap;

    /// State of a physical register.
    enum RegState {
      /// A disabled register is not available for allocation, but an alias may
      /// be in use. A register can only be moved out of the disabled state if
      /// all aliases are disabled.
      regDisabled,

      /// A free register is not currently in use and can be allocated
      /// immediately without checking aliases.
      regFree,

      /// A reserved register has been assigned explicitly (e.g., setting up a
      /// call parameter), and it remains reserved until it is used.
      regReserved

      /// A register state may also be a virtual register number, indicating
      /// that the physical register is currently allocated to a virtual
      /// register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    /// Maps each physical register to a RegState enum or a virtual register.
    std::vector<unsigned> PhysRegState;

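    /// Virtual registers whose defs turned out to be dead in the current
    /// instruction; they are killed after the def scan so that repeated defs
    /// of the same register receive the same assignment.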
    SmallVector<unsigned, 16> VirtDead;
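    /// Identity copies recognized during allocation; they are erased at the
    /// end of the block because LiveVirtRegs may still reference them.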
    SmallVector<MachineInstr *, 32> Coalesced;

    using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
    /// Set of register units that are used in the current instruction, and so
    /// cannot be allocated.
    RegUnitSet UsedInInstr;

    void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);

    /// Mark a physreg as used in this instruction.
    void markRegUsedInInstr(MCPhysReg PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    /// Check if a physreg or any of its aliases are used in this instruction.
    bool isRegUsedInInstr(MCPhysReg PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }

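    /// Relative spill costs: evicting a dirty value requires a store in
    /// addition to the eventual reload, so it is weighted twice as heavily as
    /// a clean value, which is already available in its stack slot.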
    enum : unsigned {
      spillClean = 50,
      spillDirty = 100,
      spillImpossible = ~0u
    };

  public:
    StringRef getPassName() const override { return "Fast Register Allocator"; }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoPHIs);
    }

    MachineFunctionProperties getSetProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

  private:
    bool runOnMachineFunction(MachineFunction &MF) override;

    void allocateBasicBlock(MachineBasicBlock &MBB);
    void allocateInstruction(MachineInstr &MI);
    void handleDebugValue(MachineInstr &MI);
    void handleThroughOperands(MachineInstr &MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    bool isLastUseOfLocalReg(const MachineOperand &MO) const;

    void addKillFlag(const LiveReg &LRI);
    void killVirtReg(LiveReg &LR);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand &MO);
    void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
                       RegState NewState);
    unsigned calcSpillCost(MCPhysReg PhysReg) const;
    void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);

    LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }

    LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }

    void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint);
    MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
                            unsigned Hint);
    LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
                           unsigned Hint);
    void spillAll(MachineBasicBlock::iterator MI);
    bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);

    int getStackSpaceFor(unsigned VirtReg);
    void spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
               MCPhysReg AssignedReg, bool Kill);
    void reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
                MCPhysReg PhysReg);

    void dumpState();
  };

} // end anonymous namespace

char RegAllocFast::ID = 0;

INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
                false)

void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
  PhysRegState[PhysReg] = NewState;
}

/// This allocates space for the specified virtual register to be held on the
/// stack.
int RegAllocFast::getStackSpaceFor(unsigned VirtReg) {
  // Find the stack slot this register's value would be spilled to.
  int SS = StackSlotForVirtReg[VirtReg];
  // Already has space allocated?
  if (SS != -1)
    return SS;

  // Allocate a new stack object for this spill location...
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  unsigned Size = TRI->getSpillSize(RC);
  unsigned Align = TRI->getSpillAlignment(RC);
  int FrameIdx = MFI->CreateSpillStackObject(Size, Align);

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// Insert spill instruction for \p AssignedReg before \p Before. Update
/// DBG_VALUEs that reference \p VirtReg to point to the stack slot instead.
void RegAllocFast::spill(MachineBasicBlock::iterator Before, unsigned VirtReg,
                         MCPhysReg AssignedReg, bool Kill) {
  LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
                    << " in " << printReg(AssignedReg, TRI));
  int FI = getStackSpaceFor(VirtReg);
  LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI);
  ++NumStores;

  // If this register is used by DBG_VALUE then insert a new DBG_VALUE that
  // identifies the spilled location as the place to find the corresponding
  // variable's value.
  SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg];
  for (MachineInstr *DBG : LRIDbgValues) {
    MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI);
    assert(NewDV->getParent() == MBB && "dangling parent pointer");
    (void)NewDV;
    LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
  }
  // Now that this register has been spilled, there should not be any
  // DBG_VALUE pointing to it directly; they all point to the spilled value
  // now.
  LRIDbgValues.clear();
}

/// Insert reload instruction for \p PhysReg before \p Before.
void RegAllocFast::reload(MachineBasicBlock::iterator Before, unsigned VirtReg,
                          MCPhysReg PhysReg) {
  LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
                    << printReg(PhysReg, TRI) << '\n');
  int FI = getStackSpaceFor(VirtReg);
  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI);
  ++NumLoads;
}

/// Return true if MO is the only remaining reference to its virtual register,
/// and it is guaranteed to be a block-local register.
bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// Set kill flags on last use of a virtual register.
void RegAllocFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    // Otherwise, don't do anything: we are probably redefining a subreg of
    // this register, and since we don't track which lanes are actually dead,
    // we cannot insert a kill flag here. Doing so could lead to a situation
    // like this:
    // ... = (MO) physreg:sub1, implicit killed physreg
    // ... <== Here we would allow a later pass to reuse physreg:sub1,
    //         which is potentially wrong.
    // LR:sub0 = ...
    // ... = LR.sub1 <== This is going to use physreg:sub1
  }
}

/// Mark virtreg as no longer available.
void RegAllocFast::killVirtReg(LiveReg &LR) {
  addKillFlag(LR);
  assert(PhysRegState[LR.PhysReg] == LR.VirtReg &&
         "Broken RegState mapping");
  setPhysRegState(LR.PhysReg, regFree);
  LR.PhysReg = 0;
}

/// Mark virtreg as no longer available.
void RegAllocFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg)
    killVirtReg(*LRI);
}

/// This method spills the value specified by VirtReg into the corresponding
/// stack slot if needed.
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
                                unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
         "Spilling unmapped virtual register");
  spillVirtReg(MI, *LRI);
}

/// Do the actual work of spilling.
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) {
  assert(PhysRegState[LR.PhysReg] == LR.VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
    LR.Dirty = false;

    spill(MI, LR.VirtReg, LR.PhysReg, SpillKill);

    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LR);
}

/// Spill all dirty virtregs without killing them.
void RegAllocFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty())
    return;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveReg &LR : LiveVirtRegs) {
    if (!LR.PhysReg)
      continue;
    spillVirtReg(MI, LR);
  }
  LiveVirtRegs.clear();
}

/// Handle the direct use of a physical register.  Check that the register is
/// not used by a virtreg. Kill the physreg, marking it free. This may add
/// implicit kills to MO->getParent() and invalidate MO.
void RegAllocFast::usePhysReg(MachineOperand &MO) {
  // Ignore undef uses.
  if (MO.isUndef())
    return;

  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");

  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    LLVM_FALLTHROUGH;
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      // Either PhysReg is a subregister of Alias and we mark the
      // whole register as free, or PhysReg is the superregister of
      // Alias and we mark all the aliases as disabled before freeing
      // PhysReg.
      // In the latter case, since PhysReg was disabled, this means that
      // its value is defined only by physical sub-registers. This check
      // is performed by the assert of the default case in this loop.
      // Note: The value of the superregister may only be partially
      // defined, which is why regDisabled is a valid state for aliases.
      assert((TRI->isSuperRegister(PhysReg, Alias) ||
              TRI->isSuperRegister(Alias, PhysReg)) &&
             "Instruction is not using a subregister of a reserved register");
      LLVM_FALLTHROUGH;
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        setPhysRegState(Alias, regFree);
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      setPhysRegState(Alias, regDisabled);
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  setPhysRegState(PhysReg, regFree);
  MO.setIsKill();
}

/// Mark PhysReg as reserved or free after spilling any virtregs. This is very
/// similar to defineVirtReg except the physreg is reserved instead of
/// allocated.
void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
                                 MCPhysReg PhysReg, RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    LLVM_FALLTHROUGH;
  case regFree:
  case regReserved:
    setPhysRegState(PhysReg, NewState);
    return;
  }

  // This is a disabled register, disable all aliases.
  setPhysRegState(PhysReg, NewState);
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      LLVM_FALLTHROUGH;
    case regFree:
    case regReserved:
      setPhysRegState(Alias, regDisabled);
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}

/// Return the cost of spilling and clearing out PhysReg and its aliases so
/// that it is free for allocation. Returns 0 when PhysReg is free or disabled
/// with all aliases disabled - it can be allocated directly.
/// \returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
                      << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
                      << printReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
    assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
           "Missing VirtReg entry");
    return LRI->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    MCPhysReg Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
             "Missing VirtReg entry");
      Cost += LRI->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}

/// This method updates local state so that we know that PhysReg is the
/// proper container for VirtReg now.  The physical register must not be used
/// for anything else when this is called.
void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
  unsigned VirtReg = LR.VirtReg;
  LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
                    << printReg(PhysReg, TRI) << '\n');
  assert(LR.PhysReg == 0 && "Already assigned a physreg");
  assert(PhysReg != 0 && "Trying to assign no register");
  LR.PhysReg = PhysReg;
  setPhysRegState(PhysReg, VirtReg);
}

/// Allocates a physical register for VirtReg.
void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint) {
  const unsigned VirtReg = LR.VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
                    << " in class " << TRI->getRegClassName(&RC) << '\n');

  // Take hint when possible.
  if (TargetRegisterInfo::isPhysicalRegister(Hint) &&
      MRI->isAllocatable(Hint) && RC.contains(Hint)) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      assignVirtToPhysReg(LR, Hint);
      return;
    }
  }

  // First try to find a completely free register.
  ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
  for (MCPhysReg PhysReg : AllocationOrder) {
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(LR, PhysReg);
      return;
    }
  }

  MCPhysReg BestReg = 0;
  unsigned BestCost = spillImpossible;
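  // No completely free register was found; fall back to searching the
  // allocation order for the register that is cheapest to evict.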
  for (MCPhysReg PhysReg : AllocationOrder) {
    LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
    unsigned Cost = calcSpillCost(PhysReg);
    LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
    // Immediately take a register with cost 0.
    if (Cost == 0) {
      assignVirtToPhysReg(LR, PhysReg);
      return;
    }
    if (Cost < BestCost) {
      BestReg = PhysReg;
      BestCost = Cost;
    }
  }

  if (!BestReg) {
    // Nothing we can do: Report an error and keep going with an invalid
    // allocation.
    if (MI.isInlineAsm())
      MI.emitError("inline assembly requires more registers than available");
    else
      MI.emitError("ran out of registers during register allocation");
    definePhysReg(MI, *AllocationOrder.begin(), regFree);
    assignVirtToPhysReg(LR, *AllocationOrder.begin());
    return;
  }

  definePhysReg(MI, BestReg, regFree);
  assignVirtToPhysReg(LR, BestReg);
}

/// Allocates a register for VirtReg and marks it as dirty.
MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (!LRI->PhysReg) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI->PhysReg;
}

/// Make sure VirtReg is available in a physreg and return it.
RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI,
                                                   unsigned OpNum,
                                                   unsigned VirtReg,
                                                   unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI.getOperand(OpNum);
  if (!LRI->PhysReg) {
    allocVirtReg(MI, *LRI, Hint);
    reload(MI, VirtReg, LRI->PhysReg);
  } else if (LRI->Dirty) {
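    // The value is dirty, i.e. it only lives in the register, so kill/dead
    // flags are honored only when this is provably the last use of a
    // block-local vreg; otherwise dubious flags are cleared so the value can
    // still be spilled later.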
    if (isLastUseOfLocalReg(MO)) {
      LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n');
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n');
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n');
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR killed %x, %x
    // This would cause a second reload of %x into a different register.
    LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n');
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n');
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return *LRI;
}

/// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
/// may invalidate any operand pointers.  Return true if the operand kills its
/// register.
bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
                              MCPhysReg PhysReg) {
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    MO.setIsRenamable(true);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setIsRenamable(true);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI.addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI.addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handles special instruction operands like early clobbers and tied ops when
// there are additional physreg defs.
void RegAllocFast::handleThroughOperands(MachineInstr &MI,
                                         SmallVectorImpl<unsigned> &VirtDead) {
  LLVM_DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
        (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      if (!MO.isTied()) continue;
      LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
                        << ") is tied to operand " << MI.findTiedOperandIdx(I)
                        << ".\n");
      LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
      MCPhysReg PhysReg = LR.PhysReg;
      setPhysReg(MI, MO, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
      LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n');
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
      PartialDefs.push_back(LR.PhysReg);
    }
  }

  LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0);
    if (setPhysReg(MI, MI.getOperand(I), PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
                      << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned PartialDef : PartialDefs)
    markRegUsedInInstr(PartialDef);
}

#ifndef NDEBUG
void RegAllocFast::dumpState() {
  for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
    if (PhysRegState[Reg] == regDisabled) continue;
    dbgs() << " " << printReg(Reg, TRI);
    switch(PhysRegState[Reg]) {
    case regFree:
      break;
    case regReserved:
      dbgs() << "*";
      break;
    default: {
      dbgs() << '=' << printReg(PhysRegState[Reg]);
      LiveRegMap::iterator LRI = findLiveVirtReg(PhysRegState[Reg]);
      assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
             "Missing VirtReg entry");
      if (LRI->Dirty)
        dbgs() << "*";
      assert(LRI->PhysReg == Reg && "Bad inverse map");
      break;
    }
    }
  }
  dbgs() << '\n';
  // Check that LiveVirtRegs is the inverse.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
       e = LiveVirtRegs.end(); i != e; ++i) {
    if (!i->PhysReg)
      continue;
    assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
           "Bad map key");
    assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
           "Bad map value");
    assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
  }
}
#endif

void RegAllocFast::allocateInstruction(MachineInstr &MI) {
  const MCInstrDesc &MCID = MI.getDesc();

  // If this is a copy, we may be able to coalesce.
  unsigned CopySrcReg = 0;
  unsigned CopyDstReg = 0;
  unsigned CopySrcSub = 0;
  unsigned CopyDstSub = 0;
  if (MI.isCopy()) {
    CopyDstReg = MI.getOperand(0).getReg();
    CopySrcReg = MI.getOperand(1).getReg();
    CopyDstSub = MI.getOperand(0).getSubReg();
    CopySrcSub = MI.getOperand(1).getSubReg();
  }

  // Track registers used by instruction.
  UsedInInstr.clear();

  // First scan.
  // Mark physreg uses and early clobbers as used.
  // Find the end of the virtreg operands.
  unsigned VirtOpEnd = 0;
  bool hasTiedOps = false;
  bool hasEarlyClobbers = false;
  bool hasPartialRedefs = false;
  bool hasPhysDefs = false;
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    // Make sure MRI knows about registers clobbered by regmasks.
    if (MO.isRegMask()) {
      MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
      continue;
    }
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg) continue;
    if (TargetRegisterInfo::isVirtualRegister(Reg)) {
      VirtOpEnd = i+1;
      if (MO.isUse()) {
        hasTiedOps = hasTiedOps ||
                            MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
      } else {
        if (MO.isEarlyClobber())
          hasEarlyClobbers = true;
        if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
          hasPartialRedefs = true;
      }
      continue;
    }
    if (!MRI->isAllocatable(Reg)) continue;
    if (MO.isUse()) {
      usePhysReg(MO);
    } else if (MO.isEarlyClobber()) {
      definePhysReg(MI, Reg,
                    (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
      hasEarlyClobbers = true;
    } else
      hasPhysDefs = true;
  }

  // The instruction may have virtual register operands that must be allocated
  // the same register at use-time and def-time: early clobbers and tied
  // operands. If there are also physical defs, these registers must avoid
  // both physical defs and uses, making them more constrained than normal
  // operands.
  // Similarly, if there are multiple defs and tied operands, we must make
  // sure the same register is allocated to uses and defs.
  // We didn't detect inline asm tied operands above, so just make this extra
  // pass for all inline asm.
  if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
      (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
    handleThroughOperands(MI, VirtDead);
    // Don't attempt coalescing when we have funny stuff going on.
    CopyDstReg = 0;
    // Pretend we have early clobbers so the use operands get marked below.
    // This is not necessary for the common case of a single tied use.
    hasEarlyClobbers = true;
  }

  // Second scan.
  // Allocate virtreg uses.
  for (unsigned I = 0; I != VirtOpEnd; ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
      MCPhysReg PhysReg = LR.PhysReg;
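      // Remember the physreg holding the copy source so the identity-copy
      // check at the end of this function can compare it to the destination.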
      CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
      if (setPhysReg(MI, MO, PhysReg))
        killVirtReg(LR);
    }
  }

  // Track registers defined by instruction - early clobbers and tied uses at
  // this point.
  UsedInInstr.clear();
  if (hasEarlyClobbers) {
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
      // Look for physreg defs and tied uses.
      if (!MO.isDef() && !MO.isTied()) continue;
      markRegUsedInInstr(Reg);
    }
  }

  unsigned DefOpEnd = MI.getNumOperands();
  if (MI.isCall()) {
    // Spill all virtregs before a call. This serves one purpose: if an
    // exception is thrown, the landing pad is going to expect to find
    // registers in their spill slots.
    // Note: although it is appealing to just consider all definitions
    // call-clobbered, this is not correct because some of those values may
    // still be used later on, and we do not want those registers to be
    // reused for virtual registers in the meantime.
    LLVM_DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
    spillAll(MI);
  }

  // Third scan.
  // Allocate defs and collect dead defs.
  for (unsigned I = 0; I != DefOpEnd; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
      continue;
    unsigned Reg = MO.getReg();

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      if (!MRI->isAllocatable(Reg)) continue;
      definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
      continue;
    }
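    // Allocate the virtual def, using the copy source register (if any) as a
    // hint so that identity copies can be coalesced away.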
    MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
    if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
      VirtDead.push_back(Reg);
      CopyDstReg = 0; // Cancel coalescing.
    } else
      CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
  }

  // Kill dead defs after the scan to ensure that multiple defs of the same
  // register are allocated identically. We didn't need to do this for uses
  // because we are creating our own kill flags, and they are always at the
  // last use.
  for (unsigned VirtReg : VirtDead)
    killVirtReg(VirtReg);
  VirtDead.clear();

  LLVM_DEBUG(dbgs() << "<< " << MI);
  if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
    LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
    Coalesced.push_back(&MI);
  }
}

void RegAllocFast::handleDebugValue(MachineInstr &MI) {
  MachineOperand &MO = MI.getOperand(0);

  // Ignore DBG_VALUEs that aren't based on virtual registers. These are
  // mostly constants and frame indices.
  if (!MO.isReg())
    return;
  unsigned Reg = MO.getReg();
  if (!TargetRegisterInfo::isVirtualRegister(Reg))
    return;

  // See if this virtual register has already been allocated to a physical
  // register or spilled to a stack slot.
  LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    setPhysReg(MI, MO, LRI->PhysReg);
  } else {
    int SS = StackSlotForVirtReg[Reg];
    if (SS != -1) {
      // Modify DBG_VALUE now that the value is in a spill slot.
      updateDbgValueForSpill(MI, SS);
      LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
      return;
    }

    // We can't allocate a physreg for a DebugValue, sorry!
    LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
    MO.setReg(0);
  }

  // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
  // that future spills of Reg will have DBG_VALUEs.
  LiveDbgValueMap[Reg].push_back(&MI);
}

void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB.begin();

  // Add live-in registers as live.
  for (const MachineBasicBlock::RegisterMaskPair LI : MBB.liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);

  VirtDead.clear();
  Coalesced.clear();

  // Sequentially allocate each instruction in the MBB.
  for (MachineInstr &MI : MBB) {
    LLVM_DEBUG(
      dbgs() << "\n>> " << MI << "Regs:";
      dumpState()
    );

    // Special handling for debug values. Note that they are not allowed to
    // affect codegen of the other instructions in any way.
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
      continue;
    }

    allocateInstruction(MI);
  }

  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB.getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCoalesced += Coalesced.size();

  LLVM_DEBUG(MBB.dump());
}

bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
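  // Finalize the reserved register set and compute per-class allocation
  // orders before any allocation queries are made.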
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
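  // UsedInInstr tracks individual register units rather than whole registers,
  // so its universe is the number of register units.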
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // Initialize the per-virtual-register maps: every vreg starts with no stack
  // slot and no assigned physical register.
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references.
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}