//===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This register allocator allocates registers to a basic block at a time,
// attempting to keep values in registers and reusing registers as appropriate.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/Passes.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/IndexedMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/SparseSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/RegAllocRegistry.h"
#include "llvm/CodeGen/RegisterClassInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"
#include <algorithm>
using namespace llvm;

#define DEBUG_TYPE "regalloc"

STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads, "Number of loads added");
STATISTIC(NumCopies, "Number of copies coalesced");

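// Register this allocator with the pass registry so it can be selected with
// -regalloc=fast.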
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);

namespace {
  class RAFast : public MachineFunctionPass {
  public:
    static char ID;
    RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
               isBulkSpilling(false) {}

  private:
    MachineFunction *MF;
    MachineRegisterInfo *MRI;
    const TargetRegisterInfo *TRI;
    const TargetInstrInfo *TII;
    RegisterClassInfo RegClassInfo;

    // Basic block currently being allocated.
    MachineBasicBlock *MBB;

    // StackSlotForVirtReg - Maps virtual regs to the frame index where these
    // values are spilled.
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    // Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse;    // Last instr to use reg.
      unsigned VirtReg;         // Virtual register number.
      unsigned PhysReg;         // Currently held here.
      unsigned short LastOpNum; // OpNum on LastUse.
      bool Dirty;               // Register needs spill.

      explicit LiveReg(unsigned v)
        : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false){}

      unsigned getSparseSetIndex() const {
        return TargetRegisterInfo::virtReg2Index(VirtReg);
      }
    };

    typedef SparseSet<LiveReg> LiveRegMap;

    // LiveVirtRegs - This map contains entries for each virtual register
    // that is currently available in a physical register.
    LiveRegMap LiveVirtRegs;

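    // LiveDbgValueMap - Keep track of the DBG_VALUE instructions that refer to
    // each virtual register so they can be rewritten to refer to the stack
    // slot once the register is spilled.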
    DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;

    // RegState - Track the state of a physical register.
    enum RegState {
      // A disabled register is not available for allocation, but an alias may
      // be in use. A register can only be moved out of the disabled state if
      // all aliases are disabled.
      regDisabled,

      // A free register is not currently in use and can be allocated
      // immediately without checking aliases.
      regFree,

      // A reserved register has been assigned explicitly (e.g., setting up a
      // call parameter), and it remains reserved until it is used.
      regReserved

      // A register state may also be a virtual register number, indicating that
      // the physical register is currently allocated to a virtual register. In
      // that case, LiveVirtRegs contains the inverse mapping.
    };

    // PhysRegState - One of the RegState enums, or a virtreg.
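    // Indexed by physical register number. Values above the named enumerators
    // are virtual register numbers, and LiveVirtRegs holds the inverse mapping
    // from those virtual registers back to their physical registers.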
    std::vector<unsigned> PhysRegState;

    // Set of register units.
    typedef SparseSet<unsigned> UsedInInstrSet;

    // Set of register units that are used in the current instruction, and so
    // cannot be allocated.
    UsedInInstrSet UsedInInstr;

    // Mark a physreg as used in this instruction.
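    // Tracking at the register-unit level means that overlapping registers
    // (sub- and super-registers) conflict automatically, because they share at
    // least one register unit.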
    void markRegUsedInInstr(unsigned PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    // Check if a physreg or any of its aliases are used in this instruction.
    bool isRegUsedInInstr(unsigned PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }

    // SkippedInstrs - Descriptors of instructions whose clobber list was
    // ignored because all registers were spilled. It is still necessary to
    // mark all the clobbered registers as used by the function.
    SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;

    // isBulkSpilling - This flag is set when LiveRegMap will be cleared
    // completely after spilling all live registers. LiveRegMap entries should
    // not be erased.
    bool isBulkSpilling;

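    // Spill cost constants used by calcSpillCost(). Spilling a clean register
    // is cheap because no store is needed, while a dirty register must first
    // be stored to its stack slot. spillImpossible marks registers that cannot
    // be freed for this instruction at all.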
    enum : unsigned {
      spillClean = 1,
      spillDirty = 100,
      spillImpossible = ~0u
    };
  public:
    const char *getPassName() const override {
      return "Fast Register Allocator";
    }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

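    // After this pass runs, every virtual register has been replaced by a
    // physical register, so advertise that property to later passes.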
    MachineFunctionProperties getSetProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::AllVRegsAllocated);
    }

  private:
    bool runOnMachineFunction(MachineFunction &Fn) override;
    void AllocateBasicBlock();
    void handleThroughOperands(MachineInstr *MI,
                               SmallVectorImpl<unsigned> &VirtDead);
    int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
    bool isLastUseOfLocalReg(MachineOperand&);

    void addKillFlag(const LiveReg&);
    void killVirtReg(LiveRegMap::iterator);
    void killVirtReg(unsigned VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
    void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);

    void usePhysReg(MachineOperand&);
    void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
    unsigned calcSpillCost(unsigned PhysReg) const;
    void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
    LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
      return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
    }
    LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
    LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
                                      unsigned Hint);
    LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                                       unsigned VirtReg, unsigned Hint);
    void spillAll(MachineBasicBlock::iterator MI);
    bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
  };
  char RAFast::ID = 0;
}

/// getStackSpaceFor - This allocates space for the specified virtual register
/// to be held on the stack.
int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
  // Find the stack slot VirtReg already lives in, if any.
  int SS = StackSlotForVirtReg[VirtReg];
  if (SS != -1)
    return SS;          // Already has space allocated?

  // Allocate a new stack object for this spill location...
  int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
                                                            RC->getAlignment());

  // Assign the slot.
  StackSlotForVirtReg[VirtReg] = FrameIdx;
  return FrameIdx;
}

/// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
/// its virtual register, and it is guaranteed to be a block-local register.
///
bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
  // If the register has ever been spilled or reloaded, we conservatively assume
  // it is a global register used in multiple blocks.
  if (StackSlotForVirtReg[MO.getReg()] != -1)
    return false;

  // Check that the use/def chain has exactly one operand - MO.
  MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
  if (&*I != &MO)
    return false;
  return ++I == MRI->reg_nodbg_end();
}

/// addKillFlag - Set kill flags on last use of a virtual register.
void RAFast::addKillFlag(const LiveReg &LR) {
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    else
      LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
  }
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
  addKillFlag(*LRI);
  assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
         "Broken RegState mapping");
  PhysRegState[LRI->PhysReg] = regFree;
  // Erase from LiveVirtRegs unless we're spilling in bulk.
  if (!isBulkSpilling)
    LiveVirtRegs.erase(LRI);
}

/// killVirtReg - Mark virtreg as no longer available.
void RAFast::killVirtReg(unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "killVirtReg needs a virtual register");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  if (LRI != LiveVirtRegs.end())
    killVirtReg(LRI);
}

/// spillVirtReg - This method spills the value specified by VirtReg into the
/// corresponding stack slot if needed.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Spilling a physical register is illegal!");
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
  spillVirtReg(MI, LRI);
}

/// spillVirtReg - Do the actual work of spilling.
void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
                          LiveRegMap::iterator LRI) {
  LiveReg &LR = *LRI;
  assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = LR.LastUse != MI;
    LR.Dirty = false;
    DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
                 << " in " << PrintReg(LR.PhysReg, TRI));
    const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
    int FI = getStackSpaceFor(LRI->VirtReg, RC);
    DEBUG(dbgs() << " to stack slot #" << FI << "\n");
    TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
    ++NumStores;   // Update statistics

    // If this register is used by DBG_VALUE instructions, insert new
    // DBG_VALUEs that identify the spilled location as the place to find the
    // corresponding variable's value.
    SmallVectorImpl<MachineInstr *> &LRIDbgValues =
      LiveDbgValueMap[LRI->VirtReg];
    for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
      MachineInstr *DBG = LRIDbgValues[li];
      const MDNode *Var = DBG->getDebugVariable();
      const MDNode *Expr = DBG->getDebugExpression();
      bool IsIndirect = DBG->isIndirectDebugValue();
      uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
      DebugLoc DL = DBG->getDebugLoc();
      assert(cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
             "Expected inlined-at fields to agree");
      MachineInstr *NewDV =
          BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
              .addFrameIndex(FI)
              .addImm(Offset)
              .addMetadata(Var)
              .addMetadata(Expr);
      assert(NewDV->getParent() == MBB && "dangling parent pointer");
      (void)NewDV;
      DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
    }
    // Now that this register has been spilled, there should not be any
    // DBG_VALUE instructions pointing to it; they all point to the spilled
    // value now.
    LRIDbgValues.clear();
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LRI);
}

/// spillAll - Spill all dirty virtregs without killing them.
void RAFast::spillAll(MachineBasicBlock::iterator MI) {
  if (LiveVirtRegs.empty()) return;
  isBulkSpilling = true;
  // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
  // of spilling here is deterministic, if arbitrary.
  for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
       i != e; ++i)
    spillVirtReg(MI, i);
  LiveVirtRegs.clear();
  isBulkSpilling = false;
}

/// usePhysReg - Handle the direct use of a physical register.
/// Check that the register is not used by a virtreg.
/// Kill the physreg, marking it free.
/// This may add implicit kills to MO->getParent() and invalidate MO.
void RAFast::usePhysReg(MachineOperand &MO) {
  unsigned PhysReg = MO.getReg();
  assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
         "Bad usePhysReg operand");
  markRegUsedInInstr(PhysReg);
  switch (PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regReserved:
    PhysRegState[PhysReg] = regFree;
    // Fall through
  case regFree:
    MO.setIsKill();
    return;
  default:
    // The physreg was allocated to a virtual register. That means the value we
    // wanted has been clobbered.
    llvm_unreachable("Instruction uses an allocated register");
  }

  // Maybe a superregister is reserved?
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regReserved:
      // Either PhysReg is a subregister of Alias and we mark the
      // whole register as free, or PhysReg is the superregister of
      // Alias and we mark all the aliases as disabled before freeing
      // PhysReg.
      // In the latter case, since PhysReg was disabled, this means that
      // its value is defined only by physical sub-registers. This check
      // is performed by the assert of the default case in this loop.
      // Note: The value of the superregister may be only partially
      // defined, which is why regDisabled is a valid state for aliases.
      assert((TRI->isSuperRegister(PhysReg, Alias) ||
              TRI->isSuperRegister(Alias, PhysReg)) &&
             "Instruction is not using a subregister of a reserved register");
      // Fall through.
    case regFree:
      if (TRI->isSuperRegister(PhysReg, Alias)) {
        // Leave the superregister in the working set.
        PhysRegState[Alias] = regFree;
        MO.getParent()->addRegisterKilled(Alias, TRI, true);
        return;
      }
      // Some other alias was in the working set - clear it.
      PhysRegState[Alias] = regDisabled;
      break;
    default:
      llvm_unreachable("Instruction uses an alias of an allocated register");
    }
  }

  // All aliases are disabled, bring register into working set.
  PhysRegState[PhysReg] = regFree;
  MO.setIsKill();
}

/// definePhysReg - Mark PhysReg as reserved or free after spilling any
/// virtregs. This is very similar to defineVirtReg except the physreg is
/// reserved instead of allocated.
void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
                           RegState NewState) {
  markRegUsedInInstr(PhysReg);
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  default:
    spillVirtReg(MI, VirtReg);
    // Fall through.
  case regFree:
  case regReserved:
    PhysRegState[PhysReg] = NewState;
    return;
  }

  // This is a disabled register, disable all aliases.
  PhysRegState[PhysReg] = NewState;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    default:
      spillVirtReg(MI, VirtReg);
      // Fall through.
    case regFree:
    case regReserved:
      PhysRegState[Alias] = regDisabled;
      if (TRI->isSuperRegister(PhysReg, Alias))
        return;
      break;
    }
  }
}


// calcSpillCost - Return the cost of spilling and clearing out PhysReg and
// its aliases so that it is free for allocation.
// Returns 0 when PhysReg is free or disabled with all aliases disabled - it
// can be allocated directly.
// Returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
    return spillImpossible;
  }
  switch (unsigned VirtReg = PhysRegState[PhysReg]) {
  case regDisabled:
    break;
  case regFree:
    return 0;
  case regReserved:
    DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
                 << PrintReg(PhysReg, TRI) << " is reserved already.\n");
    return spillImpossible;
  default: {
    LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
    assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
    return I->Dirty ? spillDirty : spillClean;
  }
  }

  // This is a disabled register, add up cost of aliases.
  DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
  unsigned Cost = 0;
  for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
    unsigned Alias = *AI;
    switch (unsigned VirtReg = PhysRegState[Alias]) {
    case regDisabled:
      break;
    case regFree:
      ++Cost;
      break;
    case regReserved:
      return spillImpossible;
    default: {
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
      Cost += I->Dirty ? spillDirty : spillClean;
      break;
    }
    }
  }
  return Cost;
}


/// assignVirtToPhysReg - This method updates local state so that we know
/// that PhysReg is the proper container for VirtReg now.  The physical
/// register must not be used for anything else when this is called.
///
void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
  DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
               << PrintReg(PhysReg, TRI) << "\n");
  PhysRegState[PhysReg] = LR.VirtReg;
  assert(!LR.PhysReg && "Already assigned a physreg");
  LR.PhysReg = PhysReg;
}

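/// assignVirtToPhysReg - Look VirtReg up again in LiveVirtRegs and assign
/// PhysReg to it. Callers use this after operations that may have spilled
/// registers and invalidated their LiveRegMap iterator.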
RAFast::LiveRegMap::iterator
RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
  LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
  assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
  assignVirtToPhysReg(*LRI, PhysReg);
  return LRI;
}

/// allocVirtReg - Allocate a physical register for VirtReg.
RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
                                                  LiveRegMap::iterator LRI,
                                                  unsigned Hint) {
  const unsigned VirtReg = LRI->VirtReg;

  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Can only allocate virtual registers");

  const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);

  // Ignore invalid hints.
  if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
               !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
    Hint = 0;

  // Take hint when possible.
  if (Hint) {
    // Ignore the hint if we would have to spill a dirty register.
    unsigned Cost = calcSpillCost(Hint);
    if (Cost < spillDirty) {
      if (Cost)
        definePhysReg(MI, Hint, regFree);
      // definePhysReg may kill virtual registers and modify LiveVirtRegs.
      // That invalidates LRI, so run a new lookup for VirtReg.
      return assignVirtToPhysReg(VirtReg, Hint);
    }
  }

  ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);

  // First try to find a completely free register.
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned PhysReg = *I;
    if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
      assignVirtToPhysReg(*LRI, PhysReg);
      return LRI;
    }
  }

  DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
               << TRI->getRegClassName(RC) << "\n");

  unsigned BestReg = 0, BestCost = spillImpossible;
  for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
    unsigned Cost = calcSpillCost(*I);
    DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
    DEBUG(dbgs() << "\tCost: " << Cost << "\n");
    DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
    // Cost is 0 when all aliases are already disabled.
    if (Cost == 0) {
      assignVirtToPhysReg(*LRI, *I);
      return LRI;
    }
    if (Cost < BestCost)
      BestReg = *I, BestCost = Cost;
  }

  if (BestReg) {
    definePhysReg(MI, BestReg, regFree);
    // definePhysReg may kill virtual registers and modify LiveVirtRegs.
    // That invalidates LRI, so run a new lookup for VirtReg.
    return assignVirtToPhysReg(VirtReg, BestReg);
  }

  // Nothing we can do. Report an error and keep going with a bad allocation.
  if (MI->isInlineAsm())
    MI->emitError("inline assembly requires more registers than available");
  else
    MI->emitError("ran out of registers during register allocation");
  definePhysReg(MI, *AO.begin(), regFree);
  return assignVirtToPhysReg(VirtReg, *AO.begin());
}

/// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
RAFast::LiveRegMap::iterator
RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (New) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    LRI = allocVirtReg(MI, LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

/// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
RAFast::LiveRegMap::iterator
RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
                      unsigned VirtReg, unsigned Hint) {
  assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
         "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI->getOperand(OpNum);
  if (New) {
    LRI = allocVirtReg(MI, LRI, Hint);
    const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
    int FrameIndex = getStackSpaceFor(VirtReg, RC);
    DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
                 << PrintReg(LRI->PhysReg, TRI) << "\n");
    TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
    ++NumLoads;
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      DEBUG(dbgs() << "Killing last use: " << MO << "\n");
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR %x<kill>, %x
    // This would cause a second reload of %x into a different register.
    DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  LRI->LastUse = MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI;
}

// setPhysReg - Change operand OpNum in MI to refer to PhysReg, taking
// subregisters into account. This may invalidate any operand pointers.
// Return true if the operand kills its register.
bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
  MachineOperand &MO = MI->getOperand(OpNum);
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    return MO.isKill() || Dead;
  }

  // Handle subregister index.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI->addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI->addRegisterDefined(PhysReg, TRI);

  return Dead;
}

// Handle special instruction operands like early clobbers and tied ops when
// there are additional physreg defines.
void RAFast::handleThroughOperands(MachineInstr *MI,
                                   SmallVectorImpl<unsigned> &VirtDead) {
  DEBUG(dbgs() << "Scanning for through registers:");
  SmallSet<unsigned, 8> ThroughRegs;
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg))
      continue;
    if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
        (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        DEBUG(dbgs() << ' ' << PrintReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    markRegUsedInInstr(Reg);
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      if (ThroughRegs.count(PhysRegState[*AI]))
        definePhysReg(MI, *AI, regFree);
    }
  }

  SmallVector<unsigned, 8> PartialDefs;
  DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (MO.isUse()) {
      unsigned DefIdx = 0;
      if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
      DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
        << DefIdx << ".\n");
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      unsigned PhysReg = LRI->PhysReg;
      setPhysReg(MI, i, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
      DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
      PartialDefs.push_back(LRI->PhysReg);
    }
  }

  DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg()) continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
    unsigned PhysReg = LRI->PhysReg;
    if (setPhysReg(MI, i, PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  UsedInInstr.clear();
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    unsigned Reg = MO.getReg();
    if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
    DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
                 << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
    markRegUsedInInstr(PartialDefs[i]);
}

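/// AllocateBasicBlock - Allocate registers for every instruction in MBB in a
/// single forward pass: uses are reloaded or reused from LiveVirtRegs, defs
/// get fresh registers, and anything still live at the end of the block is
/// spilled back to its stack slot.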
void RAFast::AllocateBasicBlock() {
  DEBUG(dbgs() << "\nAllocating " << *MBB);

  PhysRegState.assign(TRI->getNumRegs(), regDisabled);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB->begin();

  // Add live-in registers as live.
  for (const auto &LI : MBB->liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);

  SmallVector<unsigned, 8> VirtDead;
  SmallVector<MachineInstr*, 32> Coalesced;

  // Sequentially allocate each instruction in the MBB.
  while (MII != MBB->end()) {
    MachineInstr *MI = MII++;
    const MCInstrDesc &MCID = MI->getDesc();
    DEBUG({
        dbgs() << "\n>> " << *MI << "Regs:";
        for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
          if (PhysRegState[Reg] == regDisabled) continue;
          dbgs() << " " << TRI->getName(Reg);
          switch(PhysRegState[Reg]) {
          case regFree:
            break;
          case regReserved:
            dbgs() << "*";
            break;
          default: {
            dbgs() << '=' << PrintReg(PhysRegState[Reg]);
            LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
            assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
            if (I->Dirty)
              dbgs() << "*";
            assert(I->PhysReg == Reg && "Bad inverse map");
            break;
          }
          }
        }
        dbgs() << '\n';
        // Check that LiveVirtRegs is the inverse.
        for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
             e = LiveVirtRegs.end(); i != e; ++i) {
           assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
                  "Bad map key");
           assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
                  "Bad map value");
           assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
        }
      });

    // Debug values are not allowed to change codegen in any way.
    if (MI->isDebugValue()) {
      bool ScanDbgValue = true;
      while (ScanDbgValue) {
        ScanDbgValue = false;
        for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
          MachineOperand &MO = MI->getOperand(i);
          if (!MO.isReg()) continue;
          unsigned Reg = MO.getReg();
          if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
          LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
          if (LRI != LiveVirtRegs.end())
            setPhysReg(MI, i, LRI->PhysReg);
          else {
            int SS = StackSlotForVirtReg[Reg];
            if (SS == -1) {
              // We can't allocate a physreg for a DebugValue, sorry!
              DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
              MO.setReg(0);
            }
            else {
              // Modify DBG_VALUE now that the value is in a spill slot.
              bool IsIndirect = MI->isIndirectDebugValue();
              uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
              const MDNode *Var = MI->getDebugVariable();
              const MDNode *Expr = MI->getDebugExpression();
              DebugLoc DL = MI->getDebugLoc();
              MachineBasicBlock *MBB = MI->getParent();
              assert(
                  cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
                  "Expected inlined-at fields to agree");
              MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
                                            TII->get(TargetOpcode::DBG_VALUE))
                                        .addFrameIndex(SS)
                                        .addImm(Offset)
                                        .addMetadata(Var)
                                        .addMetadata(Expr);
              DEBUG(dbgs() << "Modifying debug info due to spill:"
                           << "\t" << *NewDV);
              // Scan NewDV operands from the beginning.
              MI = NewDV;
              ScanDbgValue = true;
              break;
            }
          }
          LiveDbgValueMap[Reg].push_back(MI);
        }
      }
      // Next instruction.
      continue;
    }

    // If this is a copy, we may be able to coalesce.
    unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
    if (MI->isCopy()) {
      CopyDst = MI->getOperand(0).getReg();
      CopySrc = MI->getOperand(1).getReg();
      CopyDstSub = MI->getOperand(0).getSubReg();
      CopySrcSub = MI->getOperand(1).getSubReg();
    }

    // Track registers used by instruction.
    UsedInInstr.clear();

    // First scan.
    // Mark physreg uses and early clobbers as used.
    // Find the end of the virtreg operands.
    unsigned VirtOpEnd = 0;
    bool hasTiedOps = false;
    bool hasEarlyClobbers = false;
    bool hasPartialRedefs = false;
    bool hasPhysDefs = false;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      // Make sure MRI knows about registers clobbered by regmasks.
      if (MO.isRegMask()) {
        MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
        continue;
      }
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!Reg) continue;
      if (TargetRegisterInfo::isVirtualRegister(Reg)) {
        VirtOpEnd = i+1;
        if (MO.isUse()) {
          hasTiedOps = hasTiedOps ||
                              MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
        } else {
          if (MO.isEarlyClobber())
            hasEarlyClobbers = true;
          if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
            hasPartialRedefs = true;
        }
        continue;
      }
      if (!MRI->isAllocatable(Reg)) continue;
      if (MO.isUse()) {
        usePhysReg(MO);
      } else if (MO.isEarlyClobber()) {
        definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
                               regFree : regReserved);
        hasEarlyClobbers = true;
      } else
        hasPhysDefs = true;
    }

    // The instruction may have virtual register operands that must be allocated
    // the same register at use-time and def-time: early clobbers and tied
    // operands. If there are also physical defs, these registers must avoid
    // both physical defs and uses, making them more constrained than normal
    // operands.
    // Similarly, if there are multiple defs and tied operands, we must make
    // sure the same register is allocated to uses and defs.
    // We didn't detect inline asm tied operands above, so just make this extra
    // pass for all inline asm.
    if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
        (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
      handleThroughOperands(MI, VirtDead);
      // Don't attempt coalescing when we have funny stuff going on.
      CopyDst = 0;
      // Pretend we have early clobbers so the use operands get marked below.
      // This is not necessary for the common case of a single tied use.
      hasEarlyClobbers = true;
    }

    // Second scan.
    // Allocate virtreg uses.
    for (unsigned i = 0; i != VirtOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
      if (MO.isUse()) {
        LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
        unsigned PhysReg = LRI->PhysReg;
        CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
        if (setPhysReg(MI, i, PhysReg))
          killVirtReg(LRI);
      }
    }

    // Track registers defined by instruction - early clobbers and tied uses at
    // this point.
    UsedInInstr.clear();
    if (hasEarlyClobbers) {
      for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
        MachineOperand &MO = MI->getOperand(i);
        if (!MO.isReg()) continue;
        unsigned Reg = MO.getReg();
        if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
        // Look for physreg defs and tied uses.
        if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
        markRegUsedInInstr(Reg);
      }
    }

    unsigned DefOpEnd = MI->getNumOperands();
    if (MI->isCall()) {
      // Spill all virtregs before a call. This serves one purpose: If an
      // exception is thrown, the landing pad is going to expect to find
      // registers in their spill slots.
      // Note: although this is appealing to just consider all definitions
      // as call-clobbered, this is not correct because some of those
      // definitions may be used later on and we do not want to reuse
      // those for virtual registers in between.
      DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
      spillAll(MI);

      // The imp-defs are skipped below, but we still need to mark those
      // registers as used by the function.
      SkippedInstrs.insert(&MCID);
    }

    // Third scan.
    // Allocate defs and collect dead defs.
    for (unsigned i = 0; i != DefOpEnd; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
        continue;
      unsigned Reg = MO.getReg();

      if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
        if (!MRI->isAllocatable(Reg)) continue;
        definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
        continue;
      }
      LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
      unsigned PhysReg = LRI->PhysReg;
      if (setPhysReg(MI, i, PhysReg)) {
        VirtDead.push_back(Reg);
        CopyDst = 0; // cancel coalescing;
      } else
        CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
    }

    // Kill dead defs after the scan to ensure that multiple defs of the same
    // register are allocated identically. We didn't need to do this for uses
    // because we are creating our own kill flags, and they are always at the
    // last use.
    for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
      killVirtReg(VirtDead[i]);
    VirtDead.clear();

    if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
      DEBUG(dbgs() << "-- coalescing: " << *MI);
      Coalesced.push_back(MI);
    } else {
      DEBUG(dbgs() << "<< " << *MI);
    }
  }

  // Spill all physical registers holding virtual registers now.
  DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB->getFirstTerminator());

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
    MBB->erase(Coalesced[i]);
  NumCopies += Coalesced.size();

  DEBUG(MBB->dump());
}

/// runOnMachineFunction - Register allocate the whole function.
///
bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
  DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
               << "********** Function: " << Fn.getName() << '\n');
  MF = &Fn;
  MRI = &MF->getRegInfo();
  TRI = MF->getSubtarget().getRegisterInfo();
  TII = MF->getSubtarget().getInstrInfo();
  MRI->freezeReservedRegs(Fn);
  RegClassInfo.runOnMachineFunction(Fn);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  assert(!MRI->isSSA() && "regalloc requires leaving SSA");

  // Initialize the virtual register -> stack slot map so that every virtual
  // register starts out without an assigned spill slot.
  StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
  LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());

  // Loop over all of the basic blocks, eliminating virtual register references.
  for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
       MBBi != MBBe; ++MBBi) {
    MBB = &*MBBi;
    AllocateBasicBlock();
  }

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  SkippedInstrs.clear();
  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}

FunctionPass *llvm::createFastRegisterAllocator() {
  return new RAFast();
}