1 //===- RegAllocFast.cpp - A fast register allocator for debug code --------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 /// \file This register allocator allocates registers to a basic block at a
10 /// time, attempting to keep values in registers and reusing registers as
11 /// appropriate.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/ADT/ArrayRef.h"
16 #include "llvm/ADT/DenseMap.h"
17 #include "llvm/ADT/IndexedMap.h"
18 #include "llvm/ADT/SmallSet.h"
19 #include "llvm/ADT/SmallVector.h"
20 #include "llvm/ADT/SparseSet.h"
21 #include "llvm/ADT/Statistic.h"
22 #include "llvm/CodeGen/MachineBasicBlock.h"
23 #include "llvm/CodeGen/MachineFrameInfo.h"
24 #include "llvm/CodeGen/MachineFunction.h"
25 #include "llvm/CodeGen/MachineFunctionPass.h"
26 #include "llvm/CodeGen/MachineInstr.h"
27 #include "llvm/CodeGen/MachineInstrBuilder.h"
28 #include "llvm/CodeGen/MachineOperand.h"
29 #include "llvm/CodeGen/MachineRegisterInfo.h"
30 #include "llvm/CodeGen/RegAllocRegistry.h"
31 #include "llvm/CodeGen/RegisterClassInfo.h"
32 #include "llvm/CodeGen/TargetInstrInfo.h"
33 #include "llvm/CodeGen/TargetOpcodes.h"
34 #include "llvm/CodeGen/TargetRegisterInfo.h"
35 #include "llvm/CodeGen/TargetSubtargetInfo.h"
36 #include "llvm/IR/DebugLoc.h"
37 #include "llvm/IR/Metadata.h"
38 #include "llvm/InitializePasses.h"
39 #include "llvm/MC/MCInstrDesc.h"
40 #include "llvm/MC/MCRegisterInfo.h"
41 #include "llvm/Pass.h"
42 #include "llvm/Support/Casting.h"
43 #include "llvm/Support/Compiler.h"
44 #include "llvm/Support/Debug.h"
45 #include "llvm/Support/ErrorHandling.h"
46 #include "llvm/Support/raw_ostream.h"
47 #include <cassert>
48 #include <tuple>
49 #include <vector>
50 
51 using namespace llvm;
52 
53 #define DEBUG_TYPE "regalloc"
54 
// Statistics reported under -stats: spill stores / reload loads inserted, and
// copies eliminated by trivial coalescing.
STATISTIC(NumStores, "Number of stores added");
STATISTIC(NumLoads , "Number of loads added");
STATISTIC(NumCoalesced, "Number of copies coalesced");

// Register this allocator with the pass registry so it can be selected with
// -regalloc=fast.
static RegisterRegAlloc
  fastRegAlloc("fast", "fast register allocator", createFastRegisterAllocator);
61 
namespace {

  /// Fast, low-quality register allocator. Allocates registers greedily one
  /// basic block at a time, keeping values in registers where possible and
  /// spilling everything that may cross block boundaries.
  class RegAllocFast : public MachineFunctionPass {
  public:
    static char ID;

    RegAllocFast() : MachineFunctionPass(ID), StackSlotForVirtReg(-1) {}

  private:
    MachineFrameInfo *MFI;          ///< Used to create spill stack objects.
    MachineRegisterInfo *MRI;       ///< Virtual register info for the function.
    const TargetRegisterInfo *TRI;  ///< Target register/register-unit queries.
    const TargetInstrInfo *TII;     ///< Emits spill/reload instructions.
    RegisterClassInfo RegClassInfo; ///< Cached allocation orders per class.

    /// Basic block currently being allocated.
    MachineBasicBlock *MBB;

    /// Maps virtual regs to the frame index where these values are spilled.
    /// The map's default value of -1 means "no slot assigned yet".
    IndexedMap<int, VirtReg2IndexFunctor> StackSlotForVirtReg;

    /// Everything we know about a live virtual register.
    struct LiveReg {
      MachineInstr *LastUse = nullptr; ///< Last instr to use reg.
      Register VirtReg;                ///< Virtual register number.
      MCPhysReg PhysReg = 0;           ///< Currently held here.
      unsigned short LastOpNum = 0;    ///< OpNum on LastUse.
      bool Dirty = false;              ///< Register needs spill.

      explicit LiveReg(Register VirtReg) : VirtReg(VirtReg) {}

      unsigned getSparseSetIndex() const {
        return Register::virtReg2Index(VirtReg);
      }
    };

    using LiveRegMap = SparseSet<LiveReg>;
    /// This map contains entries for each virtual register that is currently
    /// available in a physical register.
    LiveRegMap LiveVirtRegs;

    /// DBG_VALUE instructions referencing each virtual register, so spilling
    /// can rewrite them to point at the stack slot.
    DenseMap<unsigned, SmallVector<MachineInstr *, 2>> LiveDbgValueMap;

    /// Has a bit set for every virtual register for which it was determined
    /// that it is alive across blocks.
    BitVector MayLiveAcrossBlocks;

    /// State of a register unit.
    enum RegUnitState {
      /// A free register is not currently in use and can be allocated
      /// immediately without checking aliases.
      regFree,

      /// A reserved register has been assigned explicitly (e.g., setting up a
      /// call parameter), and it remains reserved until it is used.
      regReserved

      /// A register state may also be a virtual register number, indicating
      /// that the physical register is currently allocated to a virtual
      /// register. In that case, LiveVirtRegs contains the inverse mapping.
    };

    /// Maps each physical register to a RegUnitState enum or virtual register.
    std::vector<unsigned> RegUnitStates;

    /// Virtual registers found dead in an instruction; killed after defs.
    SmallVector<Register, 16> VirtDead;
    /// Copies that became identity copies and can be erased afterwards.
    SmallVector<MachineInstr *, 32> Coalesced;

    using RegUnitSet = SparseSet<uint16_t, identity<uint16_t>>;
    /// Set of register units that are used in the current instruction, and so
    /// cannot be allocated.
    RegUnitSet UsedInInstr;

    void setPhysRegState(MCPhysReg PhysReg, unsigned NewState);
    bool isPhysRegFree(MCPhysReg PhysReg) const;

    /// Mark a physreg as used in this instruction.
    void markRegUsedInInstr(MCPhysReg PhysReg) {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        UsedInInstr.insert(*Units);
    }

    /// Check if a physreg or any of its aliases are used in this instruction.
    bool isRegUsedInInstr(MCPhysReg PhysReg) const {
      for (MCRegUnitIterator Units(PhysReg, TRI); Units.isValid(); ++Units)
        if (UsedInInstr.count(*Units))
          return true;
      return false;
    }

    /// Heuristic spill costs consumed by calcSpillCost()/allocVirtReg().
    enum : unsigned {
      spillClean = 50,     ///< Spill a clean (already stored) register.
      spillDirty = 100,    ///< Spill a dirty register (store required).
      spillPrefBonus = 20, ///< Cost discount for hinted registers.
      spillImpossible = ~0u ///< Register cannot be freed at all.
    };

  public:
    StringRef getPassName() const override { return "Fast Register Allocator"; }

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    // PHIs must already be eliminated before this allocator runs.
    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoPHIs);
    }

    // After this pass, no virtual registers remain.
    MachineFunctionProperties getSetProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

  private:
    bool runOnMachineFunction(MachineFunction &MF) override;

    void allocateBasicBlock(MachineBasicBlock &MBB);
    void allocateInstruction(MachineInstr &MI);
    void handleDebugValue(MachineInstr &MI);
    void handleThroughOperands(MachineInstr &MI,
                               SmallVectorImpl<Register> &VirtDead);
    bool isLastUseOfLocalReg(const MachineOperand &MO) const;

    void addKillFlag(const LiveReg &LRI);
    bool verifyRegStateMapping(const LiveReg &LR) const;
    void killVirtReg(LiveReg &LR);
    void killVirtReg(Register VirtReg);
    void spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR);
    void spillVirtReg(MachineBasicBlock::iterator MI, Register VirtReg);

    void usePhysReg(MachineOperand &MO);
    void definePhysReg(MachineBasicBlock::iterator MI, MCPhysReg PhysReg,
                       unsigned NewState);
    unsigned calcSpillCost(MCPhysReg PhysReg) const;
    void assignVirtToPhysReg(LiveReg &, MCPhysReg PhysReg);

    LiveRegMap::iterator findLiveVirtReg(Register VirtReg) {
      return LiveVirtRegs.find(Register::virtReg2Index(VirtReg));
    }

    LiveRegMap::const_iterator findLiveVirtReg(Register VirtReg) const {
      return LiveVirtRegs.find(Register::virtReg2Index(VirtReg));
    }

    void allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint);
    void allocVirtRegUndef(MachineOperand &MO);
    MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
                            Register Hint);
    LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
                           Register Hint);
    void spillAll(MachineBasicBlock::iterator MI, bool OnlyLiveOut);
    bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);

    Register traceCopies(Register VirtReg) const;
    Register traceCopyChain(Register Reg) const;

    int getStackSpaceFor(Register VirtReg);
    void spill(MachineBasicBlock::iterator Before, Register VirtReg,
               MCPhysReg AssignedReg, bool Kill);
    void reload(MachineBasicBlock::iterator Before, Register VirtReg,
                MCPhysReg PhysReg);

    bool mayLiveOut(Register VirtReg);
    bool mayLiveIn(Register VirtReg);

    void printRegUnitState(unsigned State) const;
    void dumpState() const;
  };

} // end anonymous namespace
234 
// Pass identification; the address of ID is the unique pass identifier.
char RegAllocFast::ID = 0;

INITIALIZE_PASS(RegAllocFast, "regallocfast", "Fast Register Allocator", false,
                false)
239 
240 void RegAllocFast::setPhysRegState(MCPhysReg PhysReg, unsigned NewState) {
241   for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI)
242     RegUnitStates[*UI] = NewState;
243 }
244 
245 bool RegAllocFast::isPhysRegFree(MCPhysReg PhysReg) const {
246   for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
247     if (RegUnitStates[*UI] != regFree)
248       return false;
249   }
250   return true;
251 }
252 
253 /// This allocates space for the specified virtual register to be held on the
254 /// stack.
255 int RegAllocFast::getStackSpaceFor(Register VirtReg) {
256   // Find the location Reg would belong...
257   int SS = StackSlotForVirtReg[VirtReg];
258   // Already has space allocated?
259   if (SS != -1)
260     return SS;
261 
262   // Allocate a new stack object for this spill location...
263   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
264   unsigned Size = TRI->getSpillSize(RC);
265   Align Alignment = TRI->getSpillAlign(RC);
266   int FrameIdx = MFI->CreateSpillStackObject(Size, Alignment);
267 
268   // Assign the slot.
269   StackSlotForVirtReg[VirtReg] = FrameIdx;
270   return FrameIdx;
271 }
272 
/// Returns false if \p VirtReg is known to not live out of the current block.
/// May conservatively answer true; caches "crosses blocks" decisions in
/// MayLiveAcrossBlocks so the use-chain scan runs at most once per vreg.
bool RegAllocFast::mayLiveOut(Register VirtReg) {
  if (MayLiveAcrossBlocks.test(Register::virtReg2Index(VirtReg))) {
    // Cannot be live-out if there are no successors.
    return !MBB->succ_empty();
  }

  // If this block loops back to itself, it would be necessary to check whether
  // the use comes after the def.
  if (MBB->isSuccessor(MBB)) {
    MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
    return true;
  }

  // See if the first \p Limit uses of the register are all in the current
  // block.
  static const unsigned Limit = 8;
  unsigned C = 0;
  for (const MachineInstr &UseInst : MRI->reg_nodbg_instructions(VirtReg)) {
    // A use outside this block, or too many uses to scan cheaply: record the
    // vreg as crossing blocks and answer conservatively.
    if (UseInst.getParent() != MBB || ++C >= Limit) {
      MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
      // Cannot be live-out if there are no successors.
      return !MBB->succ_empty();
    }
  }

  return false;
}
301 
302 /// Returns false if \p VirtReg is known to not be live into the current block.
303 bool RegAllocFast::mayLiveIn(Register VirtReg) {
304   if (MayLiveAcrossBlocks.test(Register::virtReg2Index(VirtReg)))
305     return !MBB->pred_empty();
306 
307   // See if the first \p Limit def of the register are all in the current block.
308   static const unsigned Limit = 8;
309   unsigned C = 0;
310   for (const MachineInstr &DefInst : MRI->def_instructions(VirtReg)) {
311     if (DefInst.getParent() != MBB || ++C >= Limit) {
312       MayLiveAcrossBlocks.set(Register::virtReg2Index(VirtReg));
313       return !MBB->pred_empty();
314     }
315   }
316 
317   return false;
318 }
319 
/// Insert spill instruction for \p AssignedReg before \p Before. Update
/// DBG_VALUEs with \p VirtReg operands with the stack slot.
void RegAllocFast::spill(MachineBasicBlock::iterator Before, Register VirtReg,
                         MCPhysReg AssignedReg, bool Kill) {
  LLVM_DEBUG(dbgs() << "Spilling " << printReg(VirtReg, TRI)
                    << " in " << printReg(AssignedReg, TRI));
  // This may lazily create the stack slot on first spill of VirtReg.
  int FI = getStackSpaceFor(VirtReg);
  LLVM_DEBUG(dbgs() << " to stack slot #" << FI << '\n');

  const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
  TII->storeRegToStackSlot(*MBB, Before, AssignedReg, Kill, FI, &RC, TRI);
  ++NumStores;

  // If this register is used by DBG_VALUE then insert new DBG_VALUE to
  // identify spilled location as the place to find corresponding variable's
  // value.
  SmallVectorImpl<MachineInstr *> &LRIDbgValues = LiveDbgValueMap[VirtReg];
  for (MachineInstr *DBG : LRIDbgValues) {
    MachineInstr *NewDV = buildDbgValueForSpill(*MBB, Before, *DBG, FI);
    assert(NewDV->getParent() == MBB && "dangling parent pointer");
    (void)NewDV;
    LLVM_DEBUG(dbgs() << "Inserting debug info due to spill:\n" << *NewDV);
  }
  // Now that this register is spilled there should not be any DBG_VALUE
  // pointing to this register because they are all pointing to the spilled
  // value now.
  LRIDbgValues.clear();
}
348 
349 /// Insert reload instruction for \p PhysReg before \p Before.
350 void RegAllocFast::reload(MachineBasicBlock::iterator Before, Register VirtReg,
351                           MCPhysReg PhysReg) {
352   LLVM_DEBUG(dbgs() << "Reloading " << printReg(VirtReg, TRI) << " into "
353                     << printReg(PhysReg, TRI) << '\n');
354   int FI = getStackSpaceFor(VirtReg);
355   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
356   TII->loadRegFromStackSlot(*MBB, Before, PhysReg, FI, &RC, TRI);
357   ++NumLoads;
358 }
359 
360 /// Return true if MO is the only remaining reference to its virtual register,
361 /// and it is guaranteed to be a block-local register.
362 bool RegAllocFast::isLastUseOfLocalReg(const MachineOperand &MO) const {
363   // If the register has ever been spilled or reloaded, we conservatively assume
364   // it is a global register used in multiple blocks.
365   if (StackSlotForVirtReg[MO.getReg()] != -1)
366     return false;
367 
368   // Check that the use/def chain has exactly one operand - MO.
369   MachineRegisterInfo::reg_nodbg_iterator I = MRI->reg_nodbg_begin(MO.getReg());
370   if (&*I != &MO)
371     return false;
372   return ++I == MRI->reg_nodbg_end();
373 }
374 
/// Set kill flags on last use of a virtual register.
void RegAllocFast::addKillFlag(const LiveReg &LR) {
  // Nothing to do if the value was never used.
  if (!LR.LastUse) return;
  MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
  // Tied operands must not be marked killed: the value lives on in the def.
  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {
    if (MO.getReg() == LR.PhysReg)
      MO.setIsKill();
    // else, don't do anything we are probably redefining a
    // subreg of this register and given we don't track which
    // lanes are actually dead, we cannot insert a kill flag here.
    // Otherwise we may end up in a situation like this:
    // ... = (MO) physreg:sub1, implicit killed physreg
    // ... <== Here we would allow later pass to reuse physreg:sub1
    //         which is potentially wrong.
    // LR:sub0 = ...
    // ... = LR.sub1 <== This is going to use physreg:sub1
  }
}
393 
394 bool RegAllocFast::verifyRegStateMapping(const LiveReg &LR) const {
395   for (MCRegUnitIterator UI(LR.PhysReg, TRI); UI.isValid(); ++UI) {
396     if (RegUnitStates[*UI] != LR.VirtReg)
397       return false;
398   }
399 
400   return true;
401 }
402 
403 /// Mark virtreg as no longer available.
404 void RegAllocFast::killVirtReg(LiveReg &LR) {
405   assert(verifyRegStateMapping(LR) && "Broken RegState mapping");
406   addKillFlag(LR);
407   MCPhysReg PhysReg = LR.PhysReg;
408   setPhysRegState(PhysReg, regFree);
409   LR.PhysReg = 0;
410 }
411 
412 /// Mark virtreg as no longer available.
413 void RegAllocFast::killVirtReg(Register VirtReg) {
414   assert(Register::isVirtualRegister(VirtReg) &&
415          "killVirtReg needs a virtual register");
416   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
417   if (LRI != LiveVirtRegs.end() && LRI->PhysReg)
418     killVirtReg(*LRI);
419 }
420 
421 /// This method spills the value specified by VirtReg into the corresponding
422 /// stack slot if needed.
423 void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI,
424                                 Register VirtReg) {
425   assert(Register::isVirtualRegister(VirtReg) &&
426          "Spilling a physical register is illegal!");
427   LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
428   assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
429          "Spilling unmapped virtual register");
430   spillVirtReg(MI, *LRI);
431 }
432 
/// Do the actual work of spilling: store the value if dirty, then kill the
/// live range.
void RegAllocFast::spillVirtReg(MachineBasicBlock::iterator MI, LiveReg &LR) {
  assert(verifyRegStateMapping(LR) && "Broken RegState mapping");

  MCPhysReg PhysReg = LR.PhysReg;

  if (LR.Dirty) {
    // If this physreg is used by the instruction, we want to kill it on the
    // instruction, not on the spill.
    bool SpillKill = MachineBasicBlock::iterator(LR.LastUse) != MI;
    LR.Dirty = false;

    spill(MI, LR.VirtReg, PhysReg, SpillKill);

    // The spill itself killed the register; suppress a second kill flag in
    // killVirtReg below.
    if (SpillKill)
      LR.LastUse = nullptr; // Don't kill register again
  }
  killVirtReg(LR);
}
452 
453 /// Spill all dirty virtregs without killing them.
454 void RegAllocFast::spillAll(MachineBasicBlock::iterator MI, bool OnlyLiveOut) {
455   if (LiveVirtRegs.empty())
456     return;
457   // The LiveRegMap is keyed by an unsigned (the virtreg number), so the order
458   // of spilling here is deterministic, if arbitrary.
459   for (LiveReg &LR : LiveVirtRegs) {
460     if (!LR.PhysReg)
461       continue;
462     if (OnlyLiveOut && !mayLiveOut(LR.VirtReg))
463       continue;
464     spillVirtReg(MI, LR);
465   }
466   LiveVirtRegs.clear();
467 }
468 
/// Handle the direct use of a physical register.  Check that the register is
/// not used by a virtreg. Kill the physreg, marking it free. This may add
/// implicit kills to MO->getParent() and invalidate MO.
void RegAllocFast::usePhysReg(MachineOperand &MO) {
  // Ignore undef uses.
  if (MO.isUndef())
    return;

  Register PhysReg = MO.getReg();
  assert(PhysReg.isPhysical() && "Bad usePhysReg operand");

  // Block this register (and its units) from being allocated for the rest of
  // the current instruction.
  markRegUsedInInstr(PhysReg);

  // A reserved unit is consumed by this use; any unit held by a virtreg here
  // would be a state-tracking bug.
  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    switch (RegUnitStates[*UI]) {
    case regReserved:
      RegUnitStates[*UI] = regFree;
      LLVM_FALLTHROUGH;
    case regFree:
      break;
    default:
      llvm_unreachable("Unexpected reg unit state");
    }
  }

  // All aliases are disabled, bring register into working set.
  setPhysRegState(PhysReg, regFree);
  MO.setIsKill();
}
498 
/// Mark PhysReg as reserved or free after spilling any virtregs. This is very
/// similar to defineVirtReg except the physreg is reserved instead of
/// allocated.
void RegAllocFast::definePhysReg(MachineBasicBlock::iterator MI,
                                 MCPhysReg PhysReg, unsigned NewState) {
  // Evict any virtual register currently occupying one of PhysReg's units.
  // Any state that is not regFree/regReserved is a virtreg number.
  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    switch (unsigned VirtReg = RegUnitStates[*UI]) {
    default:
      spillVirtReg(MI, VirtReg);
      break;
    case regFree:
    case regReserved:
      break;
    }
  }

  markRegUsedInInstr(PhysReg);
  setPhysRegState(PhysReg, NewState);
}
518 
/// Return the cost of spilling clearing out PhysReg and aliases so it is free
/// for allocation. Returns 0 when PhysReg is free or disabled with all aliases
/// disabled - it can be allocated directly.
/// \returns spillImpossible when PhysReg or an alias can't be spilled.
unsigned RegAllocFast::calcSpillCost(MCPhysReg PhysReg) const {
  if (isRegUsedInInstr(PhysReg)) {
    LLVM_DEBUG(dbgs() << printReg(PhysReg, TRI)
                      << " is already used in instr.\n");
    return spillImpossible;
  }

  for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
    switch (unsigned VirtReg = RegUnitStates[*UI]) {
    case regFree:
      break;
    case regReserved:
      LLVM_DEBUG(dbgs() << printReg(VirtReg, TRI) << " corresponding "
                        << printReg(PhysReg, TRI) << " is reserved already.\n");
      return spillImpossible;
    default: {
      // Unit is held by a virtreg: cost depends on whether a store is needed.
      // Note: this returns based on the first occupied unit encountered;
      // remaining units are not inspected.
      LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
      assert(LRI != LiveVirtRegs.end() && LRI->PhysReg &&
             "Missing VirtReg entry");
      return LRI->Dirty ? spillDirty : spillClean;
    }
    }
  }
  return 0;
}
548 
549 /// This method updates local state so that we know that PhysReg is the
550 /// proper container for VirtReg now.  The physical register must not be used
551 /// for anything else when this is called.
552 void RegAllocFast::assignVirtToPhysReg(LiveReg &LR, MCPhysReg PhysReg) {
553   Register VirtReg = LR.VirtReg;
554   LLVM_DEBUG(dbgs() << "Assigning " << printReg(VirtReg, TRI) << " to "
555                     << printReg(PhysReg, TRI) << '\n');
556   assert(LR.PhysReg == 0 && "Already assigned a physreg");
557   assert(PhysReg != 0 && "Trying to assign no register");
558   LR.PhysReg = PhysReg;
559   setPhysRegState(PhysReg, VirtReg);
560 }
561 
562 static bool isCoalescable(const MachineInstr &MI) {
563   return MI.isFullCopy();
564 }
565 
566 Register RegAllocFast::traceCopyChain(Register Reg) const {
567   static const unsigned ChainLengthLimit = 3;
568   unsigned C = 0;
569   do {
570     if (Reg.isPhysical())
571       return Reg;
572     assert(Reg.isVirtual());
573 
574     MachineInstr *VRegDef = MRI->getUniqueVRegDef(Reg);
575     if (!VRegDef || !isCoalescable(*VRegDef))
576       return 0;
577     Reg = VRegDef->getOperand(1).getReg();
578   } while (++C <= ChainLengthLimit);
579   return 0;
580 }
581 
582 /// Check if any of \p VirtReg's definitions is a copy. If it is follow the
583 /// chain of copies to check whether we reach a physical register we can
584 /// coalesce with.
585 Register RegAllocFast::traceCopies(Register VirtReg) const {
586   static const unsigned DefLimit = 3;
587   unsigned C = 0;
588   for (const MachineInstr &MI : MRI->def_instructions(VirtReg)) {
589     if (isCoalescable(MI)) {
590       Register Reg = MI.getOperand(1).getReg();
591       Reg = traceCopyChain(Reg);
592       if (Reg.isValid())
593         return Reg;
594     }
595 
596     if (++C >= DefLimit)
597       break;
598   }
599   return Register();
600 }
601 
602 /// Allocates a physical register for VirtReg.
603 void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, Register Hint0) {
604   const Register VirtReg = LR.VirtReg;
605 
606   assert(Register::isVirtualRegister(VirtReg) &&
607          "Can only allocate virtual registers");
608 
609   const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
610   LLVM_DEBUG(dbgs() << "Search register for " << printReg(VirtReg)
611                     << " in class " << TRI->getRegClassName(&RC)
612                     << " with hint " << printReg(Hint0, TRI) << '\n');
613 
614   // Take hint when possible.
615   if (Hint0.isPhysical() && MRI->isAllocatable(Hint0) &&
616       RC.contains(Hint0)) {
617     // Ignore the hint if we would have to spill a dirty register.
618     unsigned Cost = calcSpillCost(Hint0);
619     if (Cost < spillDirty) {
620       LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint0, TRI)
621                         << '\n');
622       if (Cost)
623         definePhysReg(MI, Hint0, regFree);
624       assignVirtToPhysReg(LR, Hint0);
625       return;
626     } else {
627       LLVM_DEBUG(dbgs() << "\tPreferred Register 1: " << printReg(Hint0, TRI)
628                         << "occupied\n");
629     }
630   } else {
631     Hint0 = Register();
632   }
633 
634   // Try other hint.
635   Register Hint1 = traceCopies(VirtReg);
636   if (Hint1.isPhysical() && MRI->isAllocatable(Hint1) &&
637       RC.contains(Hint1) && !isRegUsedInInstr(Hint1)) {
638     // Ignore the hint if we would have to spill a dirty register.
639     unsigned Cost = calcSpillCost(Hint1);
640     if (Cost < spillDirty) {
641       LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint1, TRI)
642                         << '\n');
643       if (Cost)
644         definePhysReg(MI, Hint1, regFree);
645       assignVirtToPhysReg(LR, Hint1);
646       return;
647     } else {
648       LLVM_DEBUG(dbgs() << "\tPreferred Register 0: " << printReg(Hint1, TRI)
649                         << "occupied\n");
650     }
651   } else {
652     Hint1 = Register();
653   }
654 
655   MCPhysReg BestReg = 0;
656   unsigned BestCost = spillImpossible;
657   ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
658   for (MCPhysReg PhysReg : AllocationOrder) {
659     LLVM_DEBUG(dbgs() << "\tRegister: " << printReg(PhysReg, TRI) << ' ');
660     unsigned Cost = calcSpillCost(PhysReg);
661     LLVM_DEBUG(dbgs() << "Cost: " << Cost << " BestCost: " << BestCost << '\n');
662     // Immediate take a register with cost 0.
663     if (Cost == 0) {
664       assignVirtToPhysReg(LR, PhysReg);
665       return;
666     }
667 
668     if (PhysReg == Hint1 || PhysReg == Hint0)
669       Cost -= spillPrefBonus;
670 
671     if (Cost < BestCost) {
672       BestReg = PhysReg;
673       BestCost = Cost;
674     }
675   }
676 
677   if (!BestReg) {
678     // Nothing we can do: Report an error and keep going with an invalid
679     // allocation.
680     if (MI.isInlineAsm())
681       MI.emitError("inline assembly requires more registers than available");
682     else
683       MI.emitError("ran out of registers during register allocation");
684     definePhysReg(MI, *AllocationOrder.begin(), regFree);
685     assignVirtToPhysReg(LR, *AllocationOrder.begin());
686     return;
687   }
688 
689   definePhysReg(MI, BestReg, regFree);
690   assignVirtToPhysReg(LR, BestReg);
691 }
692 
/// Rewrite an undef use of \p VirtReg to a physical register: reuse the
/// currently assigned physreg if there is one, otherwise pick the first
/// register of the class allocation order (the value is undef, so any
/// register of the class is acceptable).
void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
  assert(MO.isUndef() && "expected undef use");
  Register VirtReg = MO.getReg();
  assert(Register::isVirtualRegister(VirtReg) && "Expected virtreg");

  LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
  MCPhysReg PhysReg;
  if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
    PhysReg = LRI->PhysReg;
  } else {
    const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
    ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
    assert(!AllocationOrder.empty() && "Allocation order must not be empty");
    PhysReg = AllocationOrder[0];
  }

  // Fold any subregister index into the chosen physreg.
  // NOTE(review): getSubReg may yield no register if PhysReg lacks that
  // subregister - presumably the class choice guarantees it exists; verify.
  unsigned SubRegIdx = MO.getSubReg();
  if (SubRegIdx != 0) {
    PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
    MO.setSubReg(0);
  }
  MO.setReg(PhysReg);
  MO.setIsRenamable(true);
}
717 
/// Allocates a register for VirtReg and mark it as dirty.
/// \param OpNum operand index of the def in \p MI.
/// \param Hint preferred physical register, may be invalid.
/// \returns the physical register now holding \p VirtReg.
MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
                                      Register VirtReg, Register Hint) {
  assert(Register::isVirtualRegister(VirtReg) && "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  if (!LRI->PhysReg) {
    // If there is no hint, peek at the only use of this register.
    if ((!Hint || !Hint.isPhysical()) &&
        MRI->hasOneNonDBGUse(VirtReg)) {
      const MachineInstr &UseMI = *MRI->use_instr_nodbg_begin(VirtReg);
      // It's a copy, use the destination register as a hint.
      if (UseMI.isCopyLike())
        Hint = UseMI.getOperand(0).getReg();
    }
    allocVirtReg(MI, *LRI, Hint);
  } else if (LRI->LastUse) {
    // Redefining a live register - kill at the last use, unless it is this
    // instruction defining VirtReg multiple times.
    if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())
      addKillFlag(*LRI);
  }
  assert(LRI->PhysReg && "Register not assigned");
  // Record this def as the latest access; the value now differs from the
  // stack slot, so it must be marked dirty.
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  LRI->Dirty = true;
  markRegUsedInInstr(LRI->PhysReg);
  return LRI->PhysReg;
}
748 
/// Make sure VirtReg is available in a physreg and return it.
/// Reloads the value from its stack slot if it is not currently mapped, and
/// cleans up kill/dead flags that would otherwise be wrong after reloading.
RegAllocFast::LiveReg &RegAllocFast::reloadVirtReg(MachineInstr &MI,
                                                   unsigned OpNum,
                                                   Register VirtReg,
                                                   Register Hint) {
  assert(Register::isVirtualRegister(VirtReg) && "Not a virtual register");
  LiveRegMap::iterator LRI;
  bool New;
  std::tie(LRI, New) = LiveVirtRegs.insert(LiveReg(VirtReg));
  MachineOperand &MO = MI.getOperand(OpNum);
  if (!LRI->PhysReg) {
    // Not currently in a register: allocate one and reload from the slot.
    allocVirtReg(MI, *LRI, Hint);
    reload(MI, VirtReg, LRI->PhysReg);
  } else if (LRI->Dirty) {
    if (isLastUseOfLocalReg(MO)) {
      LLVM_DEBUG(dbgs() << "Killing last use: " << MO << '\n');
      if (MO.isUse())
        MO.setIsKill();
      else
        MO.setIsDead();
    } else if (MO.isKill()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious kill: " << MO << '\n');
      MO.setIsKill(false);
    } else if (MO.isDead()) {
      LLVM_DEBUG(dbgs() << "Clearing dubious dead: " << MO << '\n');
      MO.setIsDead(false);
    }
  } else if (MO.isKill()) {
    // We must remove kill flags from uses of reloaded registers because the
    // register would be killed immediately, and there might be a second use:
    //   %foo = OR killed %x, %x
    // This would cause a second reload of %x into a different register.
    LLVM_DEBUG(dbgs() << "Clearing clean kill: " << MO << '\n');
    MO.setIsKill(false);
  } else if (MO.isDead()) {
    LLVM_DEBUG(dbgs() << "Clearing clean dead: " << MO << '\n');
    MO.setIsDead(false);
  }
  assert(LRI->PhysReg && "Register not assigned");
  // Record this access and block the physreg for the rest of the instruction.
  LRI->LastUse = &MI;
  LRI->LastOpNum = OpNum;
  markRegUsedInInstr(LRI->PhysReg);
  return *LRI;
}
793 
/// Changes operand OpNum in MI to refer to PhysReg, considering subregs. This
/// may invalidate any operand pointers.  Return true if the operand kills its
/// register.
bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
                              MCPhysReg PhysReg) {
  bool Dead = MO.isDead();
  if (!MO.getSubReg()) {
    MO.setReg(PhysReg);
    MO.setIsRenamable(true);
    return MO.isKill() || Dead;
  }

  // Handle subregister index: narrow PhysReg to the addressed subregister and
  // drop the subreg index from the operand.
  MO.setReg(PhysReg ? TRI->getSubReg(PhysReg, MO.getSubReg()) : Register());
  MO.setIsRenamable(true);
  MO.setSubReg(0);

  // A kill flag implies killing the full register. Add corresponding super
  // register kill.
  if (MO.isKill()) {
    MI.addRegisterKilled(PhysReg, TRI, true);
    return true;
  }

  // A <def,read-undef> of a sub-register requires an implicit def of the full
  // register.
  if (MO.isDef() && MO.isUndef())
    MI.addRegisterDefined(PhysReg, TRI);

  return Dead;
}
825 
/// Handles special instruction operands like early clobbers and tied ops when
/// there are additional physreg defines.
///
/// Pre-allocates registers for operands whose value must live "through" the
/// instruction (early clobbers, tied uses, partial redefines) so they don't
/// collide with physreg defs. Dead early-clobber virtregs are appended to
/// \p VirtDead for the caller to kill after its normal def scan.
void RegAllocFast::handleThroughOperands(MachineInstr &MI,
                                         SmallVectorImpl<Register> &VirtDead) {
  LLVM_DEBUG(dbgs() << "Scanning for through registers:");
  // Collect the virtual registers that are live both before and after MI:
  // early clobbers, tied uses, and sub-register redefines that read the old
  // value of the full register.
  SmallSet<Register, 8> ThroughRegs;
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (!Reg.isVirtual())
      continue;
    if (MO.isEarlyClobber() || (MO.isUse() && MO.isTied()) ||
        (MO.getSubReg() && MI.readsVirtualRegister(Reg))) {
      if (ThroughRegs.insert(Reg).second)
        LLVM_DEBUG(dbgs() << ' ' << printReg(Reg));
    }
  }

  // If any physreg defines collide with preallocated through registers,
  // we must spill and reallocate.
  LLVM_DEBUG(dbgs() << "\nChecking for physdef collisions.\n");
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || !MO.isDef()) continue;
    Register Reg = MO.getReg();
    if (!Reg || !Reg.isPhysical())
      continue;
    markRegUsedInInstr(Reg);

    // RegUnitStates maps each register unit to the virtreg occupying it (or
    // the regFree/regReserved sentinels, which never appear in ThroughRegs).
    for (MCRegUnitIterator UI(Reg, TRI); UI.isValid(); ++UI) {
      if (!ThroughRegs.count(RegUnitStates[*UI]))
        continue;

      // Need to spill any aliasing registers.
      for (MCRegUnitRootIterator RI(*UI, TRI); RI.isValid(); ++RI) {
        for (MCSuperRegIterator SI(*RI, TRI, true); SI.isValid(); ++SI) {
          definePhysReg(MI, *SI, regFree);
        }
      }
    }
  }

  // Reload tied uses and partial redefines now. Partial redefines are only
  // reloaded (their physregs remembered in PartialDefs), not assigned to the
  // operand yet.
  SmallVector<Register, 8> PartialDefs;
  LLVM_DEBUG(dbgs() << "Allocating tied uses.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (!Register::isVirtualRegister(Reg))
      continue;
    if (MO.isUse()) {
      if (!MO.isTied()) continue;
      LLVM_DEBUG(dbgs() << "Operand " << I << "(" << MO
                        << ") is tied to operand " << MI.findTiedOperandIdx(I)
                        << ".\n");
      LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
      MCPhysReg PhysReg = LR.PhysReg;
      setPhysReg(MI, MO, PhysReg);
      // Note: we don't update the def operand yet. That would cause the normal
      // def-scan to attempt spilling.
    } else if (MO.getSubReg() && MI.readsVirtualRegister(Reg)) {
      LLVM_DEBUG(dbgs() << "Partial redefine: " << MO << '\n');
      // Reload the register, but don't assign to the operand just yet.
      // That would confuse the later phys-def processing pass.
      LiveReg &LR = reloadVirtReg(MI, I, Reg, 0);
      PartialDefs.push_back(LR.PhysReg);
    }
  }

  LLVM_DEBUG(dbgs() << "Allocating early clobbers.\n");
  for (unsigned I = 0, E = MI.getNumOperands(); I != E; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (!Register::isVirtualRegister(Reg))
      continue;
    if (!MO.isEarlyClobber())
      continue;
    // Note: defineVirtReg may invalidate MO.
    MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, 0);
    if (setPhysReg(MI, MI.getOperand(I), PhysReg))
      VirtDead.push_back(Reg);
  }

  // Restore UsedInInstr to a state usable for allocating normal virtual uses.
  // Keep uses and early-clobber defs; plain defs are dropped so their
  // registers remain candidates for the remaining virtual uses.
  UsedInInstr.clear();
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg() || (MO.isDef() && !MO.isEarlyClobber())) continue;
    Register Reg = MO.getReg();
    if (!Reg || !Reg.isPhysical())
      continue;
    LLVM_DEBUG(dbgs() << "\tSetting " << printReg(Reg, TRI)
                      << " as used in instr\n");
    markRegUsedInInstr(Reg);
  }

  // Also mark PartialDefs as used to avoid reallocation.
  for (Register PartialDef : PartialDefs)
    markRegUsedInInstr(PartialDef);
}
925 
926 #ifndef NDEBUG
927 
/// Print the current register-unit assignments to dbgs() and assert that
/// LiveVirtRegs and RegUnitStates are mutually consistent (debug builds only).
void RegAllocFast::dumpState() const {
  // Walk every register unit and print what occupies it.
  for (unsigned Unit = 1, UnitE = TRI->getNumRegUnits(); Unit != UnitE;
       ++Unit) {
    switch (unsigned VirtReg = RegUnitStates[Unit]) {
    case regFree:
      break;
    case regReserved:
      // "[P]" marks a unit blocked for a physical register.
      dbgs() << " " << printRegUnit(Unit, TRI) << "[P]";
      break;
    default: {
      // Unit holds a virtual register; it must have a LiveVirtRegs entry
      // whose assigned physreg covers this unit.
      dbgs() << ' ' << printRegUnit(Unit, TRI) << '=' << printReg(VirtReg);
      LiveRegMap::const_iterator I = findLiveVirtReg(VirtReg);
      assert(I != LiveVirtRegs.end() && "have LiveVirtRegs entry");
      if (I->Dirty)
        dbgs() << "[D]"; // "[D]" = dirty: not yet written back to the stack slot
      assert(TRI->hasRegUnit(I->PhysReg, Unit) && "inverse mapping present");
      break;
    }
    }
  }
  dbgs() << '\n';
  // Check that LiveVirtRegs is the inverse.
  for (const LiveReg &LR : LiveVirtRegs) {
    Register VirtReg = LR.VirtReg;
    assert(VirtReg.isVirtual() && "Bad map key");
    MCPhysReg PhysReg = LR.PhysReg;
    if (PhysReg != 0) {
      assert(Register::isPhysicalRegister(PhysReg) &&
             "mapped to physreg");
      // Every unit of the assigned physreg must map back to this virtreg.
      for (MCRegUnitIterator UI(PhysReg, TRI); UI.isValid(); ++UI) {
        assert(RegUnitStates[*UI] == VirtReg && "inverse map valid");
      }
    }
  }
}
963 #endif
964 
/// Allocate all register operands of a single (non-debug) instruction.
///
/// Runs several scans over the operands: classify operands and handle physreg
/// uses/early clobbers, allocate virtual uses, mark physreg defs, and finally
/// allocate virtual defs. Identity copies whose source and destination end up
/// in the same physreg/subreg are queued in Coalesced for later removal.
void RegAllocFast::allocateInstruction(MachineInstr &MI) {
  const MCInstrDesc &MCID = MI.getDesc();

  // If this is a copy, we may be able to coalesce.
  Register CopySrcReg;
  Register CopyDstReg;
  unsigned CopySrcSub = 0;
  unsigned CopyDstSub = 0;
  if (MI.isCopy()) {
    CopyDstReg = MI.getOperand(0).getReg();
    CopySrcReg = MI.getOperand(1).getReg();
    CopyDstSub = MI.getOperand(0).getSubReg();
    CopySrcSub = MI.getOperand(1).getSubReg();
  }

  // Track registers used by instruction.
  UsedInInstr.clear();

  // First scan.
  // Mark physreg uses and early clobbers as used.
  // Find the end of the virtreg operands
  unsigned VirtOpEnd = 0;
  bool hasTiedOps = false;
  bool hasEarlyClobbers = false;
  bool hasPartialRedefs = false;
  bool hasPhysDefs = false;
  for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    // Make sure MRI knows about registers clobbered by regmasks.
    if (MO.isRegMask()) {
      MRI->addPhysRegsUsedFromRegMask(MO.getRegMask());
      continue;
    }
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (!Reg) continue;
    if (Register::isVirtualRegister(Reg)) {
      VirtOpEnd = i+1;
      if (MO.isUse()) {
        hasTiedOps = hasTiedOps ||
                            MCID.getOperandConstraint(i, MCOI::TIED_TO) != -1;
      } else {
        if (MO.isEarlyClobber())
          hasEarlyClobbers = true;
        if (MO.getSubReg() && MI.readsVirtualRegister(Reg))
          hasPartialRedefs = true;
      }
      continue;
    }
    if (!MRI->isAllocatable(Reg)) continue;
    if (MO.isUse()) {
      usePhysReg(MO);
    } else if (MO.isEarlyClobber()) {
      // Implicit or dead early-clobber defs don't need to block allocation
      // after the instruction, so mark them free rather than reserved.
      definePhysReg(MI, Reg,
                    (MO.isImplicit() || MO.isDead()) ? regFree : regReserved);
      hasEarlyClobbers = true;
    } else
      hasPhysDefs = true;
  }

  // The instruction may have virtual register operands that must be allocated
  // the same register at use-time and def-time: early clobbers and tied
  // operands. If there are also physical defs, these registers must avoid
  // both physical defs and uses, making them more constrained than normal
  // operands.
  // Similarly, if there are multiple defs and tied operands, we must make
  // sure the same register is allocated to uses and defs.
  // We didn't detect inline asm tied operands above, so just make this extra
  // pass for all inline asm.
  if (MI.isInlineAsm() || hasEarlyClobbers || hasPartialRedefs ||
      (hasTiedOps && (hasPhysDefs || MCID.getNumDefs() > 1))) {
    handleThroughOperands(MI, VirtDead);
    // Don't attempt coalescing when we have funny stuff going on.
    CopyDstReg = Register();
    // Pretend we have early clobbers so the use operands get marked below.
    // This is not necessary for the common case of a single tied use.
    hasEarlyClobbers = true;
  }

  // Second scan.
  // Allocate virtreg uses.
  bool HasUndefUse = false;
  for (unsigned I = 0; I != VirtOpEnd; ++I) {
    MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (!Reg.isVirtual())
      continue;
    if (MO.isUse()) {
      if (MO.isUndef()) {
        HasUndefUse = true;
        // There is no need to allocate a register for an undef use.
        continue;
      }

      // Populate MayLiveAcrossBlocks in case the use block is allocated before
      // the def block (removing the vreg uses).
      mayLiveIn(Reg);

      LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
      MCPhysReg PhysReg = LR.PhysReg;
      // Keep CopySrcReg tracking only while it still matches this use.
      CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
      if (setPhysReg(MI, MO, PhysReg))
        killVirtReg(LR);
    }
  }

  // Allocate undef operands. This is a separate step because in a situation
  // like  ` = OP undef %X, %X`    both operands need the same register assign
  // so we should perform the normal assignment first.
  if (HasUndefUse) {
    for (MachineOperand &MO : MI.uses()) {
      if (!MO.isReg() || !MO.isUse())
        continue;
      Register Reg = MO.getReg();
      if (!Reg.isVirtual())
        continue;

      assert(MO.isUndef() && "Should only have undef virtreg uses left");
      allocVirtRegUndef(MO);
    }
  }

  // Track registers defined by instruction - early clobbers and tied uses at
  // this point.
  UsedInInstr.clear();
  if (hasEarlyClobbers) {
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg()) continue;
      Register Reg = MO.getReg();
      if (!Reg || !Reg.isPhysical())
        continue;
      // Look for physreg defs and tied uses.
      if (!MO.isDef() && !MO.isTied()) continue;
      markRegUsedInInstr(Reg);
    }
  }

  unsigned DefOpEnd = MI.getNumOperands();
  if (MI.isCall()) {
    // Spill all virtregs before a call. This serves one purpose: If an
    // exception is thrown, the landing pad is going to expect to find
    // registers in their spill slots.
    // Note: although this is appealing to just consider all definitions
    // as call-clobbered, this is not correct because some of those
    // definitions may be used later on and we do not want to reuse
    // those for virtual registers in between.
    LLVM_DEBUG(dbgs() << "  Spilling remaining registers before call.\n");
    spillAll(MI, /*OnlyLiveOut*/ false);
  }

  // Third scan.
  // Mark all physreg defs as used before allocating virtreg defs.
  // Early clobbers were already handled in the first scan.
  for (unsigned I = 0; I != DefOpEnd; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
      continue;
    Register Reg = MO.getReg();

    if (!Reg || !Reg.isPhysical() || !MRI->isAllocatable(Reg))
      continue;
    definePhysReg(MI, Reg, MO.isDead() ? regFree : regReserved);
  }

  // Fourth scan.
  // Allocate defs and collect dead defs.
  for (unsigned I = 0; I != DefOpEnd; ++I) {
    const MachineOperand &MO = MI.getOperand(I);
    if (!MO.isReg() || !MO.isDef() || !MO.getReg() || MO.isEarlyClobber())
      continue;
    Register Reg = MO.getReg();

    // We have already dealt with phys regs in the previous scan.
    if (Reg.isPhysical())
      continue;
    MCPhysReg PhysReg = defineVirtReg(MI, I, Reg, CopySrcReg);
    if (setPhysReg(MI, MI.getOperand(I), PhysReg)) {
      VirtDead.push_back(Reg);
      CopyDstReg = Register(); // cancel coalescing;
    } else
      CopyDstReg = (CopyDstReg == Reg || CopyDstReg == PhysReg) ? PhysReg : 0;
  }

  // Kill dead defs after the scan to ensure that multiple defs of the same
  // register are allocated identically. We didn't need to do this for uses
  // because we are creating our own kill flags, and they are always at the
  // last use.
  for (Register VirtReg : VirtDead)
    killVirtReg(VirtReg);
  VirtDead.clear();

  LLVM_DEBUG(dbgs() << "<< " << MI);
  // A copy whose source and destination landed in the same register (with
  // matching subregister indices) is a no-op; queue it for deletion.
  if (CopyDstReg && CopyDstReg == CopySrcReg && CopyDstSub == CopySrcSub) {
    LLVM_DEBUG(dbgs() << "Mark identity copy for removal\n");
    Coalesced.push_back(&MI);
  }
}
1162 
1163 void RegAllocFast::handleDebugValue(MachineInstr &MI) {
1164   MachineOperand &MO = MI.getOperand(0);
1165 
1166   // Ignore DBG_VALUEs that aren't based on virtual registers. These are
1167   // mostly constants and frame indices.
1168   if (!MO.isReg())
1169     return;
1170   Register Reg = MO.getReg();
1171   if (!Register::isVirtualRegister(Reg))
1172     return;
1173 
1174   // See if this virtual register has already been allocated to a physical
1175   // register or spilled to a stack slot.
1176   LiveRegMap::iterator LRI = findLiveVirtReg(Reg);
1177   if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
1178     setPhysReg(MI, MO, LRI->PhysReg);
1179   } else {
1180     int SS = StackSlotForVirtReg[Reg];
1181     if (SS != -1) {
1182       // Modify DBG_VALUE now that the value is in a spill slot.
1183       updateDbgValueForSpill(MI, SS);
1184       LLVM_DEBUG(dbgs() << "Modifying debug info due to spill:" << "\t" << MI);
1185       return;
1186     }
1187 
1188     // We can't allocate a physreg for a DebugValue, sorry!
1189     LLVM_DEBUG(dbgs() << "Unable to allocate vreg used by DBG_VALUE");
1190     MO.setReg(Register());
1191   }
1192 
1193   // If Reg hasn't been spilled, put this DBG_VALUE in LiveDbgValueMap so
1194   // that future spills of Reg will have DBG_VALUEs.
1195   LiveDbgValueMap[Reg].push_back(&MI);
1196 }
1197 
/// Allocate registers for every instruction in \p MBB, then spill all
/// remaining live virtual registers before the block's terminator.
void RegAllocFast::allocateBasicBlock(MachineBasicBlock &MBB) {
  this->MBB = &MBB;
  LLVM_DEBUG(dbgs() << "\nAllocating " << MBB);

  // All register units start out free for this block.
  RegUnitStates.assign(TRI->getNumRegUnits(), regFree);
  assert(LiveVirtRegs.empty() && "Mapping not cleared from last block?");

  MachineBasicBlock::iterator MII = MBB.begin();

  // Add live-in registers as live.
  for (const MachineBasicBlock::RegisterMaskPair &LI : MBB.liveins())
    if (MRI->isAllocatable(LI.PhysReg))
      definePhysReg(MII, LI.PhysReg, regReserved);

  VirtDead.clear();
  Coalesced.clear();

  // Otherwise, sequentially allocate each instruction in the MBB.
  for (MachineInstr &MI : MBB) {
    LLVM_DEBUG(
      dbgs() << "\n>> " << MI << "Regs:";
      dumpState()
    );

    // Special handling for debug values. Note that they are not allowed to
    // affect codegen of the other instructions in any way.
    if (MI.isDebugValue()) {
      handleDebugValue(MI);
      continue;
    }

    allocateInstruction(MI);
  }

  // Spill all physical registers holding virtual registers now.
  LLVM_DEBUG(dbgs() << "Spilling live registers at end of block.\n");
  spillAll(MBB.getFirstTerminator(), /*OnlyLiveOut*/ true);

  // Erase all the coalesced copies. We are delaying it until now because
  // LiveVirtRegs might refer to the instrs.
  for (MachineInstr *MI : Coalesced)
    MBB.erase(MI);
  NumCoalesced += Coalesced.size();

  LLVM_DEBUG(MBB.dump());
}
1244 
/// Pass entry point: run the fast register allocator over every basic block
/// of \p MF and remove all virtual register references afterwards.
/// Always returns true since the function is modified.
bool RegAllocFast::runOnMachineFunction(MachineFunction &MF) {
  LLVM_DEBUG(dbgs() << "********** FAST REGISTER ALLOCATION **********\n"
                    << "********** Function: " << MF.getName() << '\n');
  // Cache target hooks for the rest of the pass.
  MRI = &MF.getRegInfo();
  const TargetSubtargetInfo &STI = MF.getSubtarget();
  TRI = STI.getRegisterInfo();
  TII = STI.getInstrInfo();
  MFI = &MF.getFrameInfo();
  // Freeze reserved registers before computing allocation orders below.
  MRI->freezeReservedRegs(MF);
  RegClassInfo.runOnMachineFunction(MF);
  UsedInInstr.clear();
  UsedInInstr.setUniverse(TRI->getNumRegUnits());

  // initialize the virtual->physical register map to have a 'null'
  // mapping for all virtual registers
  unsigned NumVirtRegs = MRI->getNumVirtRegs();
  StackSlotForVirtReg.resize(NumVirtRegs);
  LiveVirtRegs.setUniverse(NumVirtRegs);
  MayLiveAcrossBlocks.clear();
  MayLiveAcrossBlocks.resize(NumVirtRegs);

  // Loop over all of the basic blocks, eliminating virtual register references
  for (MachineBasicBlock &MBB : MF)
    allocateBasicBlock(MBB);

  // All machine operands and other references to virtual registers have been
  // replaced. Remove the virtual registers.
  MRI->clearVirtRegs();

  StackSlotForVirtReg.clear();
  LiveDbgValueMap.clear();
  return true;
}
1278 
/// Create an instance of the fast register allocator pass. The caller takes
/// ownership of the returned pass.
FunctionPass *llvm::createFastRegisterAllocator() {
  return new RegAllocFast();
}
1282