1 //===-- RegAllocFast.cpp - A fast register allocator for debug code -------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This register allocator allocates registers one basic block at a time,
11 // attempting to keep values in registers and reusing registers as appropriate.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/CodeGen/Passes.h"
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/IndexedMap.h"
18 #include "llvm/ADT/STLExtras.h"
19 #include "llvm/ADT/SmallSet.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/ADT/SparseSet.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/CodeGen/MachineFrameInfo.h"
24 #include "llvm/CodeGen/MachineFunctionPass.h"
25 #include "llvm/CodeGen/MachineInstr.h"
26 #include "llvm/CodeGen/MachineInstrBuilder.h"
27 #include "llvm/CodeGen/MachineRegisterInfo.h"
28 #include "llvm/CodeGen/RegAllocRegistry.h"
29 #include "llvm/CodeGen/RegisterClassInfo.h"
30 #include "llvm/IR/BasicBlock.h"
31 #include "llvm/Support/CommandLine.h"
32 #include "llvm/Support/Debug.h"
33 #include "llvm/Support/ErrorHandling.h"
34 #include "llvm/Support/raw_ostream.h"
35 #include "llvm/Target/TargetInstrInfo.h"
36 #include "llvm/Target/TargetSubtargetInfo.h"
37 #include <algorithm>
38 using namespace llvm;
39 
40 #define DEBUG_TYPE "regalloc"
41 
42 STATISTIC(NumStores, "Number of stores added");
43 STATISTIC(NumLoads , "Number of loads added");
44 STATISTIC(NumCopies, "Number of copies coalesced");
45 
46 static RegisterRegAlloc
47   fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
48 
49 namespace {
50   class RAFast : public MachineFunctionPass {
51   public:
52     static char ID;
53     RAFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1),
54                isBulkSpilling(false) {}
55 
56   private:
57     MachineFunction *MF;
58     MachineRegisterInfo *MRI;
59     const TargetRegisterInfo *TRI;
60     const TargetInstrInfo *TII;
61     RegisterClassInfo RegClassInfo;
62 
63     // Basic block currently being allocated.
64     MachineBasicBlock *MBB;
65 
66     // StackSlotForVirtReg - Maps virtual regs to the frame index where these
67     // values are spilled.
68     IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;
69 
70     // Everything we know about a live virtual register.
71     struct LiveReg {
72       MachineInstr *LastUse;    // Last instr to use reg.
73       unsigned VirtReg;         // Virtual register number.
74       unsigned PhysReg;         // Currently held here.
75       unsigned short LastOpNum; // OpNum on LastUse.
76       bool Dirty;               // Register needs spill.
77 
78       explicit LiveReg(unsigned v)
79         : LastUse(nullptr), VirtReg(v), PhysReg(0), LastOpNum(0), Dirty(false){}
80 
81       unsigned getSparseSetIndex() const {
82         return TargetRegisterInfo::virtReg2Index(VirtReg);
83       }
84     };
85 
86     typedef SparseSet<LiveReg> LiveRegMap;
87 
88     // LiveVirtRegs - This map contains entries for each virtual register
89     // that is currently available in a physical register.
90     LiveRegMap LiveVirtRegs;
91 
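    // LiveDbgValueMap - For each virtual register, the DBG_VALUE instructions
    // that refer to it. When the register is spilled, matching DBG_VALUEs are
    // inserted that point at the stack slot instead.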
92     DenseMap<unsigned, SmallVector<MachineInstr *, 4> > LiveDbgValueMap;
93 
94     // RegState - Track the state of a physical register.
95     enum RegState {
96       // A disabled register is not available for allocation, but an alias may
97       // be in use. A register can only be moved out of the disabled state if
98       // all aliases are disabled.
99       regDisabled,
100 
101       // A free register is not currently in use and can be allocated
102       // immediately without checking aliases.
103       regFree,
104 
105       // A reserved register has been assigned explicitly (e.g., setting up a
106       // call parameter), and it remains reserved until it is used.
107       regReserved
108 
109       // A register state may also be a virtual register number, indicating
110       // that the physical register is currently allocated to a virtual
111       // register. In that case, LiveVirtRegs contains the inverse mapping.
112     };
113 
114     // PhysRegState - One of the RegState enums, or a virtreg.
115     std::vector<unsigned> PhysRegState;
116 
117     // Set of register units.
118     typedef SparseSet<unsigned> UsedInInstrSet;
119 
120     // Set of register units that are used in the current instruction, and so
121     // cannot be allocated.
122     UsedInInstrSet UsedInInstr;
123 
124     // Mark a physreg as used in this instruction.
125     void markRegUsedInInstr(unsigned PhysReg) {
126       for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
127         UsedInInstr.insert(*Units);
128     }
129 
130     // Check if a physreg or any of its aliases are used in this instruction.
131     bool isRegUsedInInstr(unsigned PhysReg) const {
132       for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
133         if (UsedInInstr.count(*Units))
134           return true;
135       return false;
136     }
137 
138     // SkippedInstrs - Descriptors of instructions whose clobber list was
139     // ignored because all registers were spilled. It is still necessary to
140     // mark all the clobbered registers as used by the function.
141     SmallPtrSet<const MCInstrDesc*, 4> SkippedInstrs;
142 
143     // isBulkSpilling - This flag is set when LiveRegMap will be cleared
144     // completely after spilling all live registers. While it is set,
145     // individual LiveRegMap entries should not be erased.
146     bool isBulkSpilling;
147 
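    // Spill cost constants used by calcSpillCost: evicting a clean register is
    // cheap, evicting a dirty one requires an expensive store, and
    // spillImpossible marks registers that cannot be freed at all.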
148     enum : unsigned {
149       spillClean = 1,
150       spillDirty = 100,
151       spillImpossible = ~0u
152     };
153   public:
154     const char *getPassName() const override {
155       return "Fast Register Allocator";
156     }
157 
158     void getAnalysisUsage(AnalysisUsage &AU) const override {
159       AU.setPreservesCFG();
160       MachineFunctionPass::getAnalysisUsage(AU);
161     }
162 
163     MachineFunctionProperties getSetProperties() const override {
164       return MachineFunctionProperties().set(
165           MachineFunctionProperties::Property::AllVRegsAllocated);
166     }
167 
168   private:
169     bool runOnMachineFunction(MachineFunction &Fn) override;
170     void AllocateBasicBlock();
171     void handleThroughOperands(MachineInstr *MI,
172                                SmallVectorImpl<unsigned> &VirtDead);
173     int getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC);
174     bool isLastUseOfLocalReg(MachineOperand&);
175 
176     void addKillFlag(const LiveReg&);
177     void killVirtReg(LiveRegMap::iterator);
178     void killVirtReg(unsigned VirtReg);
179     void spillVirtReg(MachineBasicBlock::iterator MI, LiveRegMap::iterator);
180     void spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg);
181 
182     void usePhysReg(MachineOperand&);
183     void definePhysReg(MachineInstr *MI, unsigned PhysReg, RegState NewState);
184     unsigned calcSpillCost(unsigned PhysReg) const;
185     void assignVirtToPhysReg(LiveReg&, unsigned PhysReg);
186     LiveRegMap::iterator findLiveVirtReg(unsigned VirtReg) {
187       return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
188     }
189     LiveRegMap::const_iterator findLiveVirtReg(unsigned VirtReg) const {
190       return LiveVirtRegs.find(TargetRegisterInfo::virtReg2Index(VirtReg));
191     }
192     LiveRegMap::iterator assignVirtToPhysReg(unsigned VReg, unsigned PhysReg);
193     LiveRegMap::iterator allocVirtReg(MachineInstr *MI, LiveRegMap::iterator,
194                                       unsigned Hint);
195     LiveRegMap::iterator defineVirtReg(MachineInstr *MI, unsigned OpNum,
196                                        unsigned VirtReg, unsigned Hint);
197     LiveRegMap::iterator reloadVirtReg(MachineInstr *MI, unsigned OpNum,
198                                        unsigned VirtReg, unsigned Hint);
199     void spillAll(MachineBasicBlock::iterator MI);
200     bool setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg);
201   };
202   char RAFast::ID = 0;
203 }
204 
205 /// getStackSpaceFor - This allocates space for the specified virtual register
206 /// to be held on the stack.
207 int RAFast::getStackSpaceFor(unsigned VirtReg, const TargetRegisterClass *RC) {
208   // Find the location VirtReg would belong...
209   int SS = StackSlotForVirtReg[VirtReg];
210   if (SS != -1)
211     return SS;          // Already has space allocated?
212 
213   // Allocate a new stack object for this spill location...
214   int FrameIdx = MF->getFrameInfo()->CreateSpillStackObject(RC->getSize(),
215                                                             RC->getAlignment());
216 
217   // Assign the slot.
218   StackSlotForVirtReg[VirtReg] = FrameIdx;
219   return FrameIdx;
220 }
221 
222 /// isLastUseOfLocalReg - Return true if MO is the only remaining reference to
223 /// its virtual register, and it is guaranteed to be a block-local register.
224 ///
225 bool RAFast::isLastUseOfLocalReg(MachineOperand &MO) {
226   // If the register has ever been spilled or reloaded, we conservatively assume
227   // it is a global register used in multiple blocks.
228   if (StackSlotForVirtReg[MO.getReg()] != -1)
229     return false;
230 
231   // Check that the use/def chain has exactly one operand - MO.
232   MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
233   if (&*I != &MO)
234     return false;
235   return ++I == MRI->reg_nodbg_end();
236 }
237 
238 /// addKillFlag - Set kill flags on last use of a virtual register.
239 void RAFast::addKillFlag(const LiveReg &LR) {
240   if (!LR.LastUse) return;
241   MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
242   if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
243     if (MO.getReg() == LR.PhysReg)
244       MO.setIsKill();
245     else
246       LR.LastUse->addRegisterKilled(LR.PhysReg, TRI, true);
247   }
248 }
249 
250 /// killVirtReg - Mark virtreg as no longer available.
251 void RAFast::killVirtReg(LiveRegMap::iterator LRI) {
252   addKillFlag(*LRI);
253   assert(PhysRegState[LRI->PhysReg] == LRI->VirtReg &&
254          "Broken RegState mapping");
255   PhysRegState[LRI->PhysReg] = regFree;
256   // Erase from LiveVirtRegs unless we're spilling in bulk.
257   if (!isBulkSpilling)
258     LiveVirtRegs.erase(LRI);
259 }
260 
261 /// killVirtReg - Mark virtreg as no longer available.
262 void RAFast::killVirtReg(unsigned VirtReg) {
263   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
264          "killVirtReg needs a virtual register");
265   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
266   if (LRI != LiveVirtRegs.end())
267     killVirtReg(LRI);
268 }
269 
270 /// spillVirtReg - This method spills the value specified by VirtReg into the
271 /// corresponding stack slot if needed.
272 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI, unsigned VirtReg) {
273   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
274          "Spilling a physical register is illegal!");
275   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
276   assert(LRI != LiveVirtRegs.end() && "Spilling unmapped virtual register");
277   spillVirtReg(MI, LRI);
278 }
279 
280 /// spillVirtReg - Do the actual work of spilling.
281 void RAFast::spillVirtReg(MachineBasicBlock::iterator MI,
282                           LiveRegMap::iterator LRI) {
283   LiveReg &LR = *LRI;
284   assert(PhysRegState[LR.PhysReg] == LRI->VirtReg && "Broken RegState mapping");
285 
286   if (LR.Dirty) {
287     // If this physreg is used by the instruction, we want to kill it on the
288     // instruction, not on the spill.
289     bool SpillKill = LR.LastUse != MI;
290     LR.Dirty = false;
291     DEBUG(dbgs() << "Spilling " << PrintReg(LRI->VirtReg, TRI)
292                  << " in " << PrintReg(LR.PhysReg, TRI));
293     const TargetRegisterClass *RC = MRI->getRegClass(LRI->VirtReg);
294     int FI = getStackSpaceFor(LRI->VirtReg, RC);
295     DEBUG(dbgs() << " to stack slot #" << FI << "\n");
296     TII->storeRegToStackSlot(*MBB, MI, LR.PhysReg, SpillKill, FI, RC, TRI);
297     ++NumStores;   // Update statistics
298 
299     // If this register is used by a DBG_VALUE, insert a new DBG_VALUE that
300     // identifies the spill slot as the place to find the corresponding
301     // variable's value.
302     SmallVectorImpl<MachineInstr *> &LRIDbgValues =
303       LiveDbgValueMap[LRI->VirtReg];
304     for (unsigned li = 0, le = LRIDbgValues.size(); li != le; ++li) {
305       MachineInstr *DBG = LRIDbgValues[li];
306       const MDNode *Var = DBG->getDebugVariable();
307       const MDNode *Expr = DBG->getDebugExpression();
308       bool IsIndirect = DBG->isIndirectDebugValue();
309       uint64_t Offset = IsIndirect ? DBG->getOperand(1).getImm() : 0;
310       DebugLoc DL = DBG->getDebugLoc();
311       assert(cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
312              "Expected inlined-at fields to agree");
313       MachineInstr *NewDV =
314           BuildMI(*MBB, MI, DL, TII->get(TargetOpcode::DBG_VALUE))
315               .addFrameIndex(FI)
316               .addImm(Offset)
317               .addMetadata(Var)
318               .addMetadata(Expr);
319       assert(NewDV->getParent() == MBB && "dangling parent pointer");
320       (void)NewDV;
321       DEBUG(dbgs() << "Inserting debug info due to spill:" << "\n" << *NewDV);
322     }
323     // Now that this register has been spilled, there should not be any
324     // DBG_VALUE instructions pointing to it; they all point to the spilled
325     // value now.
326     LRIDbgValues.clear();
327     if (SpillKill)
328       LR.LastUse = nullptr; // Don't kill register again
329   }
330   killVirtReg(LRI);
331 }
332 
333 /// spillAll - Spill all dirty virtregs without killing them.
334 void RAFast::spillAll(MachineBasicBlock::iterator MI) {
335   if (LiveVirtRegs.empty()) return;
336   isBulkSpilling = true;
337   // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
338   // of spilling here is deterministic, if arbitrary.
339   for (LiveRegMap::iterator i = LiveVirtRegs.begin(), e = LiveVirtRegs.end();
340        i != e; ++i)
341     spillVirtReg(MI, i);
342   LiveVirtRegs.clear();
343   isBulkSpilling = false;
344 }
345 
346 /// usePhysReg - Handle the direct use of a physical register.
347 /// Check that the register is not used by a virtreg.
348 /// Kill the physreg, marking it free.
349 /// This may add implicit kills to MO->getParent() and invalidate MO.
350 void RAFast::usePhysReg(MachineOperand &MO) {
351   unsigned PhysReg = MO.getReg();
352   assert(TargetRegisterInfo::isPhysicalRegister(PhysReg) &&
353          "Bad usePhysReg operand");
354   markRegUsedInInstr(PhysReg);
355   switch (PhysRegState[PhysReg]) {
356   case regDisabled:
357     break;
358   case regReserved:
359     PhysRegState[PhysReg] = regFree;
360     // Fall through
361   case regFree:
362     MO.setIsKill();
363     return;
364   default:
365     // The physreg was allocated to a virtual register. That means the value we
366     // wanted has been clobbered.
367     llvm_unreachable("Instruction uses an allocated register");
368   }
369 
370   // Maybe a superregister is reserved?
371   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
372     unsigned Alias = *AI;
373     switch (PhysRegState[Alias]) {
374     case regDisabled:
375       break;
376     case regReserved:
377       // Either PhysReg is a subregister of Alias and we mark the
378       // whole register as free, or PhysReg is the superregister of
379       // Alias and we mark all the aliases as disabled before freeing
380       // PhysReg.
381       // In the latter case, since PhysReg was disabled, this means that
382       // its value is defined only by physical sub-registers. This check
383       // is performed by the assert of the default case in this loop.
384       // Note: The value of the superregister may only be partially defined;
385       // that is why regDisabled is a valid state for aliases.
386       assert((TRI->isSuperRegister(PhysReg, Alias) ||
387               TRI->isSuperRegister(Alias, PhysReg)) &&
388              "Instruction is not using a subregister of a reserved register");
389       // Fall through.
390     case regFree:
391       if (TRI->isSuperRegister(PhysReg, Alias)) {
392         // Leave the superregister in the working set.
393         PhysRegState[Alias] = regFree;
394         MO.getParent()->addRegisterKilled(Alias, TRI, true);
395         return;
396       }
397       // Some other alias was in the working set - clear it.
398       PhysRegState[Alias] = regDisabled;
399       break;
400     default:
401       llvm_unreachable("Instruction uses an alias of an allocated register");
402     }
403   }
404 
405   // All aliases are disabled, bring register into working set.
406   PhysRegState[PhysReg] = regFree;
407   MO.setIsKill();
408 }
409 
410 /// definePhysReg - Mark PhysReg as reserved or free after spilling any
411 /// virtregs. This is very similar to defineVirtReg except the physreg is
412 /// reserved instead of allocated.
413 void RAFast::definePhysReg(MachineInstr *MI, unsigned PhysReg,
414                            RegState NewState) {
415   markRegUsedInInstr(PhysReg);
416   switch (unsigned VirtReg = PhysRegState[PhysReg]) {
417   case regDisabled:
418     break;
419   default:
420     spillVirtReg(MI, VirtReg);
421     // Fall through.
422   case regFree:
423   case regReserved:
424     PhysRegState[PhysReg] = NewState;
425     return;
426   }
427 
428   // This is a disabled register, disable all aliases.
429   PhysRegState[PhysReg] = NewState;
430   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
431     unsigned Alias = *AI;
432     switch (unsigned VirtReg = PhysRegState[Alias]) {
433     case regDisabled:
434       break;
435     default:
436       spillVirtReg(MI, VirtReg);
437       // Fall through.
438     case regFree:
439     case regReserved:
440       PhysRegState[Alias] = regDisabled;
441       if (TRI->isSuperRegister(PhysReg, Alias))
442         return;
443       break;
444     }
445   }
446 }
447 
448 
449 // calcSpillCost - Return the cost of spilling or clearing out PhysReg and its
450 // aliases so that it is free for allocation.
451 // Returns 0 when PhysReg is free or disabled with all aliases disabled - it
452 // can be allocated directly.
453 // Returns spillImpossible when PhysReg or an alias can't be spilled.
454 unsigned RAFast::calcSpillCost(unsigned PhysReg) const {
455   if (isRegUsedInInstr(PhysReg)) {
456     DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is already used in instr.\n");
457     return spillImpossible;
458   }
459   switch (unsigned VirtReg = PhysRegState[PhysReg]) {
460   case regDisabled:
461     break;
462   case regFree:
463     return 0;
464   case regReserved:
465     DEBUG(dbgs() << PrintReg(VirtReg, TRI) << " corresponding "
466                  << PrintReg(PhysReg, TRI) << " is reserved already.\n");
467     return spillImpossible;
468   default: {
469     LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
470     assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
471     return I->Dirty ? spillDirty : spillClean;
472   }
473   }
474 
475   // This is a disabled register, add up cost of aliases.
476   DEBUG(dbgs() << PrintReg(PhysReg, TRI) << " is disabled.\n");
477   unsigned Cost = 0;
478   for (MCRegAliasIterator AI(PhysReg, TRI, false); AI.isValid(); ++AI) {
479     unsigned Alias = *AI;
480     switch (unsigned VirtReg = PhysRegState[Alias]) {
481     case regDisabled:
482       break;
483     case regFree:
484       ++Cost;
485       break;
486     case regReserved:
487       return spillImpossible;
488     default: {
489       LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
490       assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
491       Cost += I->Dirty ? spillDirty : spillClean;
492       break;
493     }
494     }
495   }
496   return Cost;
497 }
498 
499 
500 /// assignVirtToPhysReg - This method updates local state so that we know
501 /// that PhysReg is the proper container for VirtReg now.  The physical
502 /// register must not be used for anything else when this is called.
503 ///
504 void RAFast::assignVirtToPhysReg(LiveReg &LR, unsigned PhysReg) {
505   DEBUG(dbgs() << "Assigning " << PrintReg(LR.VirtReg, TRI) << " to "
506                << PrintReg(PhysReg, TRI) << "\n");
507   PhysRegState[PhysReg] = LR.VirtReg;
508   assert(!LR.PhysReg && "Already assigned a physreg");
509   LR.PhysReg = PhysReg;
510 }
511 
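/// assignVirtToPhysReg - Look up VirtReg in LiveVirtRegs and assign PhysReg
/// to it. Used by callers whose LiveRegMap iterator may have been invalidated
/// by a preceding definePhysReg call.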
512 RAFast::LiveRegMap::iterator
513 RAFast::assignVirtToPhysReg(unsigned VirtReg, unsigned PhysReg) {
514   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
515   assert(LRI != LiveVirtRegs.end() && "VirtReg disappeared");
516   assignVirtToPhysReg(*LRI, PhysReg);
517   return LRI;
518 }
519 
520 /// allocVirtReg - Allocate a physical register for VirtReg.
521 RAFast::LiveRegMap::iterator RAFast::allocVirtReg(MachineInstr *MI,
522                                                   LiveRegMap::iterator LRI,
523                                                   unsigned Hint) {
524   const unsigned VirtReg = LRI->VirtReg;
525 
526   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
527          "Can only allocate virtual registers");
528 
529   const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
530 
531   // Ignore invalid hints.
532   if (Hint && (!TargetRegisterInfo::isPhysicalRegister(Hint) ||
533                !RC->contains(Hint) || !MRI->isAllocatable(Hint)))
534     Hint = 0;
535 
536   // Take hint when possible.
537   if (Hint) {
538     // Ignore the hint if we would have to spill a dirty register.
539     unsigned Cost = calcSpillCost(Hint);
540     if (Cost < spillDirty) {
541       if (Cost)
542         definePhysReg(MI, Hint, regFree);
543       // definePhysReg may kill virtual registers and modify LiveVirtRegs.
544       // That invalidates LRI, so run a new lookup for VirtReg.
545       return assignVirtToPhysReg(VirtReg, Hint);
546     }
547   }
548 
549   ArrayRef<MCPhysReg> AO = RegClassInfo.getOrder(RC);
550 
551   // First try to find a completely free register.
552   for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
553     unsigned PhysReg = *I;
554     if (PhysRegState[PhysReg] == regFree && !isRegUsedInInstr(PhysReg)) {
555       assignVirtToPhysReg(*LRI, PhysReg);
556       return LRI;
557     }
558   }
559 
560   DEBUG(dbgs() << "Allocating " << PrintReg(VirtReg) << " from "
561                << TRI->getRegClassName(RC) << "\n");
562 
563   unsigned BestReg = 0, BestCost = spillImpossible;
564   for (ArrayRef<MCPhysReg>::iterator I = AO.begin(), E = AO.end(); I != E; ++I){
565     unsigned Cost = calcSpillCost(*I);
566     DEBUG(dbgs() << "\tRegister: " << PrintReg(*I, TRI) << "\n");
567     DEBUG(dbgs() << "\tCost: " << Cost << "\n");
568     DEBUG(dbgs() << "\tBestCost: " << BestCost << "\n");
569     // Cost is 0 when all aliases are already disabled.
570     if (Cost == 0) {
571       assignVirtToPhysReg(*LRI, *I);
572       return LRI;
573     }
574     if (Cost < BestCost)
575       BestReg = *I, BestCost = Cost;
576   }
577 
578   if (BestReg) {
579     definePhysReg(MI, BestReg, regFree);
580     // definePhysReg may kill virtual registers and modify LiveVirtRegs.
581     // That invalidates LRI, so run a new lookup for VirtReg.
582     return assignVirtToPhysReg(VirtReg, BestReg);
583   }
584 
585   // Nothing we can do. Report an error and keep going with a bad allocation.
586   if (MI->isInlineAsm())
587     MI->emitError("inline assembly requires more registers than available");
588   else
589     MI->emitError("ran out of registers during register allocation");
590   definePhysReg(MI, *AO.begin(), regFree);
591   return assignVirtToPhysReg(VirtReg, *AO.begin());
592 }
593 
594 /// defineVirtReg - Allocate a register for VirtReg and mark it as dirty.
595 RAFast::LiveRegMap::iterator
596 RAFast::defineVirtReg(MachineInstr *MI, unsigned OpNum,
597                       unsigned VirtReg, unsigned Hint) {
598   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
599          "Not a virtual register");
600   LiveRegMap::iterator LRI;
601   bool New;
602   std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
603   if (New) {
604     // If there is no hint, peek at the only use of this register.
605     if ((!Hint || !TargetRegisterInfo::isPhysicalRegister(Hint)) &&
606         MRI->hasOneNonDBGUse(VirtReg)) {
607       const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
608       // It's a copy, use the destination register as a hint.
609       if (UseMI.isCopyLike())
610         Hint = UseMI.getOperand(0).getReg();
611     }
612     LRI = allocVirtReg(MI, LRI, Hint);
613   } else if (LRI->LastUse) {
614     // Redefining a live register - kill at the last use, unless it is this
615     // instruction defining VirtReg multiple times.
616     if (LRI->LastUse != MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
617       addKillFlag(*LRI);
618   }
619   assert(LRI->PhysReg && "Register not assigned");
620   LRI->LastUse = MI;
621   LRI->LastOpNum = OpNum;
622   LRI->Dirty = true;
623   markRegUsedInInstr(LRI->PhysReg);
624   return LRI;
625 }
626 
627 /// reloadVirtReg - Make sure VirtReg is available in a physreg and return it.
628 RAFast::LiveRegMap::iterator
629 RAFast::reloadVirtReg(MachineInstr *MI, unsigned OpNum,
630                       unsigned VirtReg, unsigned Hint) {
631   assert(TargetRegisterInfo::isVirtualRegister(VirtReg) &&
632          "Not a virtual register");
633   LiveRegMap::iterator LRI;
634   bool New;
635   std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
636   MachineOperand &MO = MI->getOperand(OpNum);
637   if (New) {
638     LRI = allocVirtReg(MI, LRI, Hint);
639     const TargetRegisterClass *RC = MRI->getRegClass(VirtReg);
640     int FrameIndex = getStackSpaceFor(VirtReg, RC);
641     DEBUG(dbgs() << "Reloading " << PrintReg(VirtReg, TRI) << " into "
642                  << PrintReg(LRI->PhysReg, TRI) << "\n");
643     TII->loadRegFromStackSlot(*MBB, MI, LRI->PhysReg, FrameIndex, RC, TRI);
644     ++NumLoads;
645   } else if (LRI->Dirty) {
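    // VirtReg is already live in a dirty physreg. Only trust a kill we can
    // prove ourselves (the last remaining use of a block-local register);
    // other incoming kill/dead flags are unreliable here and get cleared.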
646     if (isLastUseOfLocalReg(MO)) {
647       DEBUG(dbgs() << "Killing last use: " << MO << "\n");
648       if (MO.isUse())
649         MO.setIsKill();
650       else
651         MO.setIsDead();
652     } else if (MO.isKill()) {
653       DEBUG(dbgs() << "Clearing dubious kill: " << MO << "\n");
654       MO.setIsKill(false);
655     } else if (MO.isDead()) {
656       DEBUG(dbgs() << "Clearing dubious dead: " << MO << "\n");
657       MO.setIsDead(false);
658     }
659   } else if (MO.isKill()) {
660     // We must remove kill flags from uses of reloaded registers because the
661     // register would be killed immediately, and there might be a second use:
662     //   %foo = OR %x<kill>, %x
663     // This would cause a second reload of %x into a different register.
664     DEBUG(dbgs() << "Clearing clean kill: " << MO << "\n");
665     MO.setIsKill(false);
666   } else if (MO.isDead()) {
667     DEBUG(dbgs() << "Clearing clean dead: " << MO << "\n");
668     MO.setIsDead(false);
669   }
670   assert(LRI->PhysReg && "Register not assigned");
671   LRI->LastUse = MI;
672   LRI->LastOpNum = OpNum;
673   markRegUsedInInstr(LRI->PhysReg);
674   return LRI;
675 }
676 
677 // setPhysReg - Change operand OpNum in MI to refer to PhysReg, considering
678 // subregs. This may invalidate any operand pointers.
679 // Return true if the operand kills its register.
680 bool RAFast::setPhysReg(MachineInstr *MI, unsigned OpNum, unsigned PhysReg) {
681   MachineOperand &MO = MI->getOperand(OpNum);
682   bool Dead = MO.isDead();
683   if (!MO.getSubReg()) {
684     MO.setReg(PhysReg);
685     return MO.isKill() || Dead;
686   }
687 
688   // Handle subregister index.
689   MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : 0);
690   MO.setSubReg(0);
691 
692   // A kill flag implies killing the full register. Add corresponding super
693   // register kill.
694   if (MO.isKill()) {
695     MI->addRegisterKilled(PhysReg, TRI, true);
696     return true;
697   }
698 
699   // A <def,read-undef> of a sub-register requires an implicit def of the full
700   // register.
701   if (MO.isDef() && MO.isUndef())
702     MI->addRegisterDefined(PhysReg, TRI);
703 
704   return Dead;
705 }
706 
707 // Handle special instruction operands like early clobbers and tied ops when
708 // there are additional physreg defines.
709 void RAFast::handleThroughOperands(MachineInstr *MI,
710                                    SmallVectorImpl<unsigned> &VirtDead) {
711   DEBUG(dbgs() << "Scanning for through registers:");
712   SmallSet<unsigned, 8> ThroughRegs;
713   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
714     MachineOperand &MO = MI->getOperand(i);
715     if (!MO.isReg()) continue;
716     unsigned Reg = MO.getReg();
717     if (!TargetRegisterInfo::isVirtualRegister(Reg))
718       continue;
719     if (MO.isEarlyClobber() || MI->isRegTiedToDefOperand(i) ||
720         (MO.getSubReg() && MI->readsVirtualRegister(Reg))) {
721       if (ThroughRegs.insert(Reg).second)
722         DEBUG(dbgs() << ' ' << PrintReg(Reg));
723     }
724   }
725 
726   // If any physreg defines collide with preallocated through registers,
727   // we must spill and reallocate.
728   DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
729   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
730     MachineOperand &MO = MI->getOperand(i);
731     if (!MO.isReg() || !MO.isDef()) continue;
732     unsigned Reg = MO.getReg();
733     if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
734     markRegUsedInInstr(Reg);
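    // If the alias currently holds one of the through virtregs, free it so
    // the through register can be reallocated to a non-conflicting register.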
735     for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
736       if (ThroughRegs.count(PhysRegState[*AI]))
737         definePhysReg(MI, *AI, regFree);
738     }
739   }
740 
741   SmallVector<unsigned, 8> PartialDefs;
742   DEBUG(dbgs() << "Allocating tied uses.\n");
743   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
744     MachineOperand &MO = MI->getOperand(i);
745     if (!MO.isReg()) continue;
746     unsigned Reg = MO.getReg();
747     if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
748     if (MO.isUse()) {
749       unsigned DefIdx = 0;
750       if (!MI->isRegTiedToDefOperand(i, &DefIdx)) continue;
751       DEBUG(dbgs() << "Operand " << i << "("<< MO << ") is tied to operand "
752         << DefIdx << ".\n");
753       LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
754       unsigned PhysReg = LRI->PhysReg;
755       setPhysReg(MI, i, PhysReg);
756       // Note: we don't update the def operand yet. That would cause the normal
757       // def-scan to attempt spilling.
758     } else if (MO.getSubReg() && MI->readsVirtualRegister(Reg)) {
759       DEBUG(dbgs() << "Partial redefine: " << MO << "\n");
760       // Reload the register, but don't assign to the operand just yet.
761       // That would confuse the later phys-def processing pass.
762       LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, 0);
763       PartialDefs.push_back(LRI->PhysReg);
764     }
765   }
766 
767   DEBUG(dbgs() << "Allocating early clobbers.\n");
768   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
769     MachineOperand &MO = MI->getOperand(i);
770     if (!MO.isReg()) continue;
771     unsigned Reg = MO.getReg();
772     if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
773     if (!MO.isEarlyClobber())
774       continue;
775     // Note: defineVirtReg may invalidate MO.
776     LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, 0);
777     unsigned PhysReg = LRI->PhysReg;
778     if (setPhysReg(MI, i, PhysReg))
779       VirtDead.push_back(Reg);
780   }
781 
782   // Restore UsedInInstr to a state usable for allocating normal virtual uses.
783   UsedInInstr.clear();
784   for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
785     MachineOperand &MO = MI->getOperand(i);
786     if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
787     unsigned Reg = MO.getReg();
788     if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
789     DEBUG(dbgs() << "\tSetting " << PrintReg(Reg, TRI)
790                  << " as used in instr\n");
791     markRegUsedInInstr(Reg);
792   }
793 
794   // Also mark PartialDefs as used to avoid reallocation.
795   for (unsigned i = 0, e = PartialDefs.size(); i != e; ++i)
796     markRegUsedInInstr(PartialDefs[i]);
797 }
798 
799 void RAFast::AllocateBasicBlock() {
800   DEBUG(dbgs() << "\nAllocating " << *MBB);
801 
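  // Start with every physreg disabled; live-ins and the per-instruction
  // handling below bring registers into the working set as they appear.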
802   PhysRegState.assign(TRI->getNumRegs(), regDisabled);
803   assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");
804 
805   MachineBasicBlock::iterator MII = MBB->begin();
806 
807   // Add live-in registers as live.
808   for (const auto &LI : MBB->liveins())
809     if (MRI->isAllocatable(LI.PhysReg))
810       definePhysReg(MII, LI.PhysReg, regReserved);
811 
812   SmallVector<unsigned, 8> VirtDead;
813   SmallVector<MachineInstr*, 32> Coalesced;
814 
815   // Sequentially allocate each instruction in the MBB.
816   while (MII != MBB->end()) {
817     MachineInstr *MI = MII++;
818     const MCInstrDesc &MCID = MI->getDesc();
819     DEBUG({
820         dbgs() << "\n>> " << *MI << "Regs:";
821         for (unsigned Reg = 1, E = TRI->getNumRegs(); Reg != E; ++Reg) {
822           if (PhysRegState[Reg] == regDisabled) continue;
823           dbgs() << " " << TRI->getName(Reg);
824           switch(PhysRegState[Reg]) {
825           case regFree:
826             break;
827           case regReserved:
828             dbgs() << "*";
829             break;
830           default: {
831             dbgs() << '=' << PrintReg(PhysRegState[Reg]);
832             LiveRegMap::iterator I = findLiveVirtReg(PhysRegState[Reg]);
833             assert(I != LiveVirtRegs.end() && "Missing VirtReg entry");
834             if (I->Dirty)
835               dbgs() << "*";
836             assert(I->PhysReg == Reg && "Bad inverse map");
837             break;
838           }
839           }
840         }
841         dbgs() << '\n';
842         // Check that LiveVirtRegs is the inverse.
843         for (LiveRegMap::iterator i = LiveVirtRegs.begin(),
844              e = LiveVirtRegs.end(); i != e; ++i) {
845            assert(TargetRegisterInfo::isVirtualRegister(i->VirtReg) &&
846                   "Bad map key");
847            assert(TargetRegisterInfo::isPhysicalRegister(i->PhysReg) &&
848                   "Bad map value");
849            assert(PhysRegState[i->PhysReg] == i->VirtReg && "Bad inverse map");
850         }
851       });
852 
853     // Debug values are not allowed to change codegen in any way.
854     if (MI->isDebugValue()) {
855       bool ScanDbgValue = true;
856       while (ScanDbgValue) {
857         ScanDbgValue = false;
858         for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
859           MachineOperand &MO = MI->getOperand(i);
860           if (!MO.isReg()) continue;
861           unsigned Reg = MO.getReg();
862           if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
863           LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
864           if (LRI != LiveVirtRegs.end())
865             setPhysReg(MI, i, LRI->PhysReg);
866           else {
867             int SS = StackSlotForVirtReg[Reg];
868             if (SS == -1) {
869               // We can't allocate a physreg for a DebugValue, sorry!
870               DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
871               MO.setReg(0);
872             }
873             else {
874               // Modify DBG_VALUE now that the value is in a spill slot.
875               bool IsIndirect = MI->isIndirectDebugValue();
876               uint64_t Offset = IsIndirect ? MI->getOperand(1).getImm() : 0;
877               const MDNode *Var = MI->getDebugVariable();
878               const MDNode *Expr = MI->getDebugExpression();
879               DebugLoc DL = MI->getDebugLoc();
880               MachineBasicBlock *MBB = MI->getParent();
881               assert(
882                   cast<DILocalVariable>(Var)->isValidLocationForIntrinsic(DL) &&
883                   "Expected inlined-at fields to agree");
884               MachineInstr *NewDV = BuildMI(*MBB, MBB->erase(MI), DL,
885                                             TII->get(TargetOpcode::DBG_VALUE))
886                                         .addFrameIndex(SS)
887                                         .addImm(Offset)
888                                         .addMetadata(Var)
889                                         .addMetadata(Expr);
890               DEBUG(dbgs() << "Modifying debug info due to spill:"
891                            << "\t" << *NewDV);
892               // Scan NewDV operands from the beginning.
893               MI = NewDV;
894               ScanDbgValue = true;
895               break;
896             }
897           }
898           LiveDbgValueMap[Reg].push_back(MI);
899         }
900       }
901       // Next instruction.
902       continue;
903     }
904 
905     // If this is a copy, we may be able to coalesce.
906     unsigned CopySrc = 0, CopyDst = 0, CopySrcSub = 0, CopyDstSub = 0;
907     if (MI->isCopy()) {
908       CopyDst = MI->getOperand(0).getReg();
909       CopySrc = MI->getOperand(1).getReg();
910       CopyDstSub = MI->getOperand(0).getSubReg();
911       CopySrcSub = MI->getOperand(1).getSubReg();
912     }
913 
914     // Track registers used by instruction.
915     UsedInInstr.clear();
916 
917     // First scan.
918     // Mark physreg uses and early clobbers as used.
919     // Find the end of the virtreg operands.
920     unsigned VirtOpEnd = 0;
921     bool hasTiedOps = false;
922     bool hasEarlyClobbers = false;
923     bool hasPartialRedefs = false;
924     bool hasPhysDefs = false;
925     for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
926       MachineOperand &MO = MI->getOperand(i);
927       // Make sure MRI knows about registers clobbered by regmasks.
928       if (MO.isRegMask()) {
929         MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
930         continue;
931       }
932       if (!MO.isReg()) continue;
933       unsigned Reg = MO.getReg();
934       if (!Reg) continue;
935       if (TargetRegisterInfo::isVirtualRegister(Reg)) {
936         VirtOpEnd = i+1;
937         if (MO.isUse()) {
938           hasTiedOps = hasTiedOps ||
939                               MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
940         } else {
941           if (MO.isEarlyClobber())
942             hasEarlyClobbers = true;
943           if (MO.getSubReg() && MI->readsVirtualRegister(Reg))
944             hasPartialRedefs = true;
945         }
946         continue;
947       }
948       if (!MRI->isAllocatable(Reg)) continue;
949       if (MO.isUse()) {
950         usePhysReg(MO);
951       } else if (MO.isEarlyClobber()) {
952         definePhysReg(MI, Reg, (MO.isImplicit() || MO.isDead()) ?
953                                regFree : regReserved);
954         hasEarlyClobbers = true;
955       } else
956         hasPhysDefs = true;
957     }
958 
959     // The instruction may have virtual register operands that must be allocated
960     // the same register at use-time and def-time: early clobbers and tied
961     // operands. If there are also physical defs, these registers must avoid
962     // both physical defs and uses, making them more constrained than normal
963     // operands.
964     // Similarly, if there are multiple defs and tied operands, we must make
965     // sure the same register is allocated to uses and defs.
966     // We didn't detect inline asm tied operands above, so just make this extra
967     // pass for all inline asm.
968     if (MI->isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
969         (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
970       handleThroughOperands(MI, VirtDead);
971       // Don't attempt coalescing when we have funny stuff going on.
972       CopyDst = 0;
973       // Pretend we have early clobbers so the use operands get marked below.
974       // This is not necessary for the common case of a single tied use.
975       hasEarlyClobbers = true;
976     }
977 
978     // Second scan.
979     // Allocate virtreg uses.
980     for (unsigned i = 0; i != VirtOpEnd; ++i) {
981       MachineOperand &MO = MI->getOperand(i);
982       if (!MO.isReg()) continue;
983       unsigned Reg = MO.getReg();
984       if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
985       if (MO.isUse()) {
986         LiveRegMap::iterator LRI = reloadVirtReg(MI, i, Reg, CopyDst);
987         unsigned PhysReg = LRI->PhysReg;
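        // If this use is the copy source, remember the physreg it landed in so
        // the def below can try to coalesce with it; anything else cancels
        // coalescing.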
988         CopySrc = (CopySrc == Reg || CopySrc == PhysReg) ? PhysReg : 0;
989         if (setPhysReg(MI, i, PhysReg))
990           killVirtReg(LRI);
991       }
992     }
993 
994     // Track registers defined by instruction - early clobbers and tied uses at
995     // this point.
996     UsedInInstr.clear();
997     if (hasEarlyClobbers) {
998       for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
999         MachineOperand &MO = MI->getOperand(i);
1000         if (!MO.isReg()) continue;
1001         unsigned Reg = MO.getReg();
1002         if (!Reg || !TargetRegisterInfo::isPhysicalRegister(Reg)) continue;
1003         // Look for physreg defs and tied uses.
1004         if (!MO.isDef() && !MI->isRegTiedToDefOperand(i)) continue;
1005         markRegUsedInInstr(Reg);
1006       }
1007     }
1008 
1009     unsigned DefOpEnd = MI->getNumOperands();
1010     if (MI->isCall()) {
1011       // Spill all virtregs before a call. This serves one purpose: if an
1012       // exception is thrown, the landing pad expects to find the values of
1013       // live registers in their spill slots.
1014       // Note: although it is appealing to just consider all definitions
1015       // as call-clobbered, this is not correct because some of those
1016       // definitions may be used later on, and we do not want to reuse
1017       // those registers for virtual registers in between.
1018       DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
1019       spillAll(MI);
1020 
1021       // The imp-defs are skipped below, but we still need to mark those
1022       // registers as used by the function.
1023       SkippedInstrs.insert(&MCID);
1024     }
1025 
1026     // Third scan.
1027     // Allocate defs and collect dead defs.
1028     for (unsigned i = 0; i != DefOpEnd; ++i) {
1029       MachineOperand &MO = MI->getOperand(i);
1030       if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
1031         continue;
1032       unsigned Reg = MO.getReg();
1033 
1034       if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
1035         if (!MRI->isAllocatable(Reg)) continue;
1036         definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
1037         continue;
1038       }
1039       LiveRegMap::iterator LRI = defineVirtReg(MI, i, Reg, CopySrc);
1040       unsigned PhysReg = LRI->PhysReg;
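      // A dead def cancels coalescing; otherwise coalescing stays viable only
      // if this def is the copy destination and received the expected physreg.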
1041       if (setPhysReg(MI, i, PhysReg)) {
1042         VirtDead.push_back(Reg);
1043         CopyDst = 0; // cancel coalescing;
1044       } else
1045         CopyDst = (CopyDst == Reg || CopyDst == PhysReg) ? PhysReg : 0;
1046     }
1047 
1048     // Kill dead defs after the scan to ensure that multiple defs of the same
1049     // register are allocated identically. We didn't need to do this for uses
1050     // because we are creating our own kill flags, and they are always at the
1051     // last use.
1052     for (unsigned i = 0, e = VirtDead.size(); i != e; ++i)
1053       killVirtReg(VirtDead[i]);
1054     VirtDead.clear();
1055 
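    // After allocation the copy is a no-op exactly when both operands ended
    // up in the same physical register (and subregister); such copies are
    // recorded in Coalesced and erased at the end of the block.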
1056     if (CopyDst && CopyDst == CopySrc && CopyDstSub == CopySrcSub) {
1057       DEBUG(dbgs() << "-- coalescing: " << *MI);
1058       Coalesced.push_back(MI);
1059     } else {
1060       DEBUG(dbgs() << "<< " << *MI);
1061     }
1062   }
1063 
1064   // Spill all physical registers holding virtual registers now.
1065   DEBUG(dbgs() << "Spilling live registers at end of block.\n");
1066   spillAll(MBB->getFirstTerminator());
1067 
1068   // Erase all the coalesced copies. We are delaying it until now because
1069   // LiveVirtRegs might refer to the instrs.
1070   for (unsigned i = 0, e = Coalesced.size(); i != e; ++i)
1071     MBB->erase(Coalesced[i]);
1072   NumCopies += Coalesced.size();
1073 
1074   DEBUG(MBB->dump());
1075 }
1076 
1077 /// runOnMachineFunction - Register allocate the whole function
1078 ///
1079 bool RAFast::runOnMachineFunction(MachineFunction &Fn) {
1080   DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
1081                << "********** Function: " << Fn.getName() << '\n');
1082   MF = &Fn;
1083   MRI = &MF->getRegInfo();
1084   TRI = MF->getSubtarget().getRegisterInfo();
1085   TII = MF->getSubtarget().getInstrInfo();
1086   MRI->freezeReservedRegs(Fn);
1087   RegClassInfo.runOnMachineFunction(Fn);
1088   UsedInInstr.clear();
1089   UsedInInstr.setUniverse(TRI->getNumRegUnits());
1090 
1091   assert(!MRI->isSSA() && "regalloc requires leaving SSA");
1092 
1093   // Initialize the virtual register -> stack slot map so that every virtual
1094   // register starts out without an assigned spill slot.
1095   StackSlotForVirtReg.resize(MRI->getNumVirtRegs());
1096   LiveVirtRegs.setUniverse(MRI->getNumVirtRegs());
1097 
1098   // Loop over all of the basic blocks, eliminating virtual register references
1099   for (MachineFunction::iterator MBBi = Fn.begin(), MBBe = Fn.end();
1100        MBBi != MBBe; ++MBBi) {
1101     MBB = &*MBBi;
1102     AllocateBasicBlock();
1103   }
1104 
1105   // All machine operands and other references to virtual registers have been
1106   // replaced. Remove the virtual registers.
1107   MRI->clearVirtRegs();
1108 
1109   SkippedInstrs.clear();
1110   StackSlotForVirtReg.clear();
1111   LiveDbgValueMap.clear();
1112   return true;
1113 }
1114 
1115 FunctionPass *llvm::createFastRegisterAllocator() {
1116   return new RAFast();
1117 }
1118