1 //===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 /// \file
11 /// This file implements the machine register scavenger. It can provide
12 /// information, such as unused registers, at any point in a machine basic
13 /// block. It also provides a mechanism to make registers available by evicting
14 /// them to spill slots.
15 //
16 //===----------------------------------------------------------------------===//
17 
18 #include "llvm/CodeGen/RegisterScavenging.h"
19 
20 #include "llvm/ADT/BitVector.h"
21 #include "llvm/ADT/SmallVector.h"
22 #include "llvm/ADT/Statistic.h"
23 #include "llvm/CodeGen/MachineBasicBlock.h"
24 #include "llvm/CodeGen/MachineFrameInfo.h"
25 #include "llvm/CodeGen/MachineFunction.h"
26 #include "llvm/CodeGen/MachineInstr.h"
27 #include "llvm/CodeGen/MachineOperand.h"
28 #include "llvm/CodeGen/MachineRegisterInfo.h"
29 #include "llvm/MC/MCRegisterInfo.h"
30 #include "llvm/PassSupport.h"
31 #include "llvm/Support/Debug.h"
32 #include "llvm/Support/ErrorHandling.h"
33 #include "llvm/Support/raw_ostream.h"
34 #include "llvm/Target/TargetFrameLowering.h"
35 #include "llvm/Target/TargetInstrInfo.h"
36 #include "llvm/Target/TargetRegisterInfo.h"
37 #include "llvm/Target/TargetSubtargetInfo.h"
38 #include <algorithm>
39 #include <cassert>
40 #include <iterator>
41 #include <limits>
42 #include <string>
43 
44 using namespace llvm;
45 
46 #define DEBUG_TYPE "reg-scavenging"
47 
48 STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
49 
/// Mark the lanes in \p LaneMask of register \p Reg as live (in use).
void RegScavenger::setRegUsed(unsigned Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
53 
/// (Re-)initialize the scavenger state for basic block \p MBB: cache the
/// target hooks, allocate the per-function bit vectors on first use, release
/// all scavenging slots, and leave tracking mode.
void RegScavenger::init(MachineBasicBlock &MBB) {
  MachineFunction &MF = *MBB.getParent();
  TII = MF.getSubtarget().getInstrInfo();
  TRI = MF.getSubtarget().getRegisterInfo();
  MRI = &MF.getRegInfo();
  LiveUnits.init(*TRI);

  // The number of register units is a property of the target; it must not
  // change between blocks of the same function.
  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
         "Target changed?");

  // Self-initialize.
  // The bit vectors are allocated lazily the first time a block is entered
  // (this->MBB is still null then) and reused for all later blocks.
  if (!this->MBB) {
    NumRegUnits = TRI->getNumRegUnits();
    KillRegUnits.resize(NumRegUnits);
    DefRegUnits.resize(NumRegUnits);
    TmpRegUnits.resize(NumRegUnits);
  }
  this->MBB = &MBB;

  // Release all emergency scavenging slots left over from a previous block.
  for (ScavengedInfo &SI : Scavenged) {
    SI.Reg = 0;
    SI.Restore = nullptr;
  }

  Tracking = false;
}
80 
/// Start scavenging at the top of \p MBB: reset state and seed the live set
/// with the block's live-in registers.
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
85 
86 void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
87   init(MBB);
88   LiveUnits.addLiveOuts(MBB);
89 
90   // Move internal iterator at the last instruction of the block.
91   if (MBB.begin() != MBB.end()) {
92     MBBI = std::prev(MBB.end());
93     Tracking = true;
94   }
95 }
96 
97 void RegScavenger::addRegUnits(BitVector &BV, unsigned Reg) {
98   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
99     BV.set(*RUI);
100 }
101 
102 void RegScavenger::removeRegUnits(BitVector &BV, unsigned Reg) {
103   for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
104     BV.reset(*RUI);
105 }
106 
/// Compute, for the instruction at the current position (MBBI), the sets of
/// register units that die (KillRegUnits) and that become live
/// (DefRegUnits). Reserved registers are ignored.
void RegScavenger::determineKillsAndDefs() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  MachineInstr &MI = *MBBI;
  assert(!MI.isDebugValue() && "Debug values have no kills or defs");

  // Find out which registers are early clobbered, killed, defined, and marked
  // def-dead in this instruction.
  KillRegUnits.reset();
  DefRegUnits.reset();
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isRegMask()) {
      // Translate the regmask (indexed by register) into register units: a
      // unit is clobbered iff one of its root registers is clobbered.
      TmpRegUnits.clear();
      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
          if (MO.clobbersPhysReg(*RURI)) {
            TmpRegUnits.set(RU);
            break;
          }
        }
      }

      // Apply the mask.
      KillRegUnits |= TmpRegUnits;
    }
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;

    if (MO.isUse()) {
      // Ignore undef uses.
      if (MO.isUndef())
        continue;
      if (MO.isKill())
        addRegUnits(KillRegUnits, Reg);
    } else {
      assert(MO.isDef());
      // A dead def frees the register immediately after the instruction.
      if (MO.isDead())
        addRegUnits(KillRegUnits, Reg);
      else
        addRegUnits(DefRegUnits, Reg);
    }
  }
}
153 
/// Step the scavenger one instruction backwards, undoing the liveness effect
/// of the instruction at the current position. Leaves tracking mode when
/// stepping before the first instruction of the block.
void RegScavenger::unprocess() {
  assert(Tracking && "Cannot unprocess because we're not tracking");

  MachineInstr &MI = *MBBI;
  if (!MI.isDebugValue()) {
    determineKillsAndDefs();

    // Commit the changes: killed registers become live again, defined
    // registers are no longer live before this instruction.
    setUsed(KillRegUnits);
    setUnused(DefRegUnits);
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
172 
/// Step the scavenger one instruction forward, applying the liveness effect
/// of the instruction it moves onto. In assertion builds, uses and defs are
/// cross-checked against the tracked liveness.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    // First forward step in this block: start at the first instruction.
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // Release scavenging slots whose restore point is this instruction.
  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  // Debug values do not affect liveness.
  if (MI.isDebugValue())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    unsigned Reg = MO.getReg();
    if (!TargetRegisterInfo::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg D0<undef>, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs)
          if (isRegUsed(*SubRegs)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        // Neither the register nor any sub-/super-register is live: the
        // instruction reads a truly undefined register.
        if (!SubUsed && !SuperUsed) {
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
257 
/// Step the scavenger one instruction backwards using LiveRegUnits'
/// backward-dataflow update, and release scavenging slots whose restore
/// point is the instruction stepped over. Leaves tracking mode when stepping
/// before the first instruction of the block.
void RegScavenger::backward() {
  assert(Tracking && "Must be tracking to determine kills and defs");

  const MachineInstr &MI = *MBBI;
  LiveUnits.stepBackward(MI);

  // Expire scavenge spill frameindex uses.
  for (ScavengedInfo &I : Scavenged) {
    if (I.Restore == &MI) {
      I.Reg = 0;
      I.Restore = nullptr;
    }
  }

  if (MBBI == MBB->begin()) {
    MBBI = MachineBasicBlock::iterator(nullptr);
    Tracking = false;
  } else
    --MBBI;
}
278 
279 bool RegScavenger::isRegUsed(unsigned Reg, bool includeReserved) const {
280   if (isReserved(Reg))
281     return includeReserved;
282   return !LiveUnits.available(Reg);
283 }
284 
285 unsigned RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
286   for (unsigned Reg : *RC) {
287     if (!isRegUsed(Reg)) {
288       DEBUG(dbgs() << "Scavenger found unused reg: " << TRI->getName(Reg) <<
289             "\n");
290       return Reg;
291     }
292   }
293   return 0;
294 }
295 
296 BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
297   BitVector Mask(TRI->getNumRegs());
298   for (unsigned Reg : *RC)
299     if (!isRegUsed(Reg))
300       Mask.set(Reg);
301   return Mask;
302 }
303 
/// Scan forward from \p StartMI (up to \p InstrLimit non-debug instructions
/// or the first terminator) for the register in \p Candidates whose first
/// use/clobber is furthest away. \p Candidates is pruned as a side effect.
/// \p UseMI receives the position before which the scavenged register must
/// be restored. Returns the surviving register.
unsigned RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugValue()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      // Physical register operand: it and all its aliases are unusable.
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
367 
368 /// Given the bitvector \p Available of free register units at position
369 /// \p From. Search backwards to find a register that is part of \p
370 /// Candidates and not used/clobbered until the point \p To. If there is
371 /// multiple candidates continue searching and pick the one that is not used/
372 /// clobbered for the longest time.
373 /// Returns the register and the earliest position we know it to be free or
374 /// the position MBB.end() if no register is available.
375 static std::pair<MCPhysReg, MachineBasicBlock::iterator>
376 findSurvivorBackwards(const MachineRegisterInfo &MRI,
377     MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
378     const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
379     bool RestoreAfter) {
380   bool FoundTo = false;
381   MCPhysReg Survivor = 0;
382   MachineBasicBlock::iterator Pos;
383   MachineBasicBlock &MBB = *From->getParent();
384   unsigned InstrLimit = 25;
385   unsigned InstrCountDown = InstrLimit;
386   const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
387   LiveRegUnits Used(TRI);
388 
389   for (MachineBasicBlock::iterator I = From;; --I) {
390     const MachineInstr &MI = *I;
391 
392     Used.accumulate(MI);
393 
394     if (I == To) {
395       // See if one of the registers in RC wasn't used so far.
396       for (MCPhysReg Reg : AllocationOrder) {
397         if (!MRI.isReserved(Reg) && Used.available(Reg) &&
398             LiveOut.available(Reg))
399           return std::make_pair(Reg, MBB.end());
400       }
401       // Otherwise we will continue up to InstrLimit instructions to find
402       // the register which is not defined/used for the longest time.
403       FoundTo = true;
404       Pos = To;
405       // Note: It was fine so far to start our search at From, however now that
406       // we have to spill, and can only place the restore after From then
407       // add the regs used/defed by std::next(From) to the set.
408       if (RestoreAfter)
409         Used.accumulate(*std::next(From));
410     }
411     if (FoundTo) {
412       if (Survivor == 0 || !Used.available(Survivor)) {
413         MCPhysReg AvilableReg = 0;
414         for (MCPhysReg Reg : AllocationOrder) {
415           if (!MRI.isReserved(Reg) && Used.available(Reg)) {
416             AvilableReg = Reg;
417             break;
418           }
419         }
420         if (AvilableReg == 0)
421           break;
422         Survivor = AvilableReg;
423       }
424       if (--InstrCountDown == 0)
425         break;
426 
427       // Keep searching when we find a vreg since the spilled register will
428       // be usefull for this other vreg as well later.
429       bool FoundVReg = false;
430       for (const MachineOperand &MO : MI.operands()) {
431         if (MO.isReg() && TargetRegisterInfo::isVirtualRegister(MO.getReg())) {
432           FoundVReg = true;
433           break;
434         }
435       }
436       if (FoundVReg) {
437         InstrCountDown = InstrLimit;
438         Pos = I;
439       }
440       if (I == MBB.begin())
441         break;
442     }
443   }
444 
445   return std::make_pair(Survivor, Pos);
446 }
447 
448 static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
449   unsigned i = 0;
450   while (!MI.getOperand(i).isFI()) {
451     ++i;
452     assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
453   }
454   return i;
455 }
456 
/// Spill \p Reg (of class \p RC) so that it becomes available for
/// scavenging: either via the target's saveScavengerRegister hook or by
/// storing it to an emergency spill slot before \p Before and reloading it
/// before \p UseMI. Frame indices of the inserted instructions are
/// eliminated immediately using \p SPAdj. Returns the scavenging slot used.
RegScavenger::ScavengedInfo &
RegScavenger::spill(unsigned Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getParent()->getParent();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  // SI == Scavenged.size() signals "no usable slot found".
  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S-NeedSize) + (A-NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    // The just-inserted store references the scavenging slot by frame index;
    // rewrite it to a concrete stack reference right away.
    MachineBasicBlock::iterator II = std::prev(Before);

    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
531 
/// Make a register of class \p RC available at instruction \p I, spilling a
/// live register to an emergency slot if none is free. Returns the
/// scavenged register.
unsigned RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
                                        MachineBasicBlock::iterator I,
                                        int SPAdj) {
  MachineInstr &MI = *I;
  const MachineFunction &MF = *MI.getParent()->getParent();
  // Consider all allocatable registers in the register class initially
  BitVector Candidates = TRI->getAllocatableSet(MF, RC);

  // Exclude all the registers being used by the instruction.
  for (const MachineOperand &MO : MI.operands()) {
    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
        !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
  }

  // Try to find a register that's unused if there is one, as then we won't
  // have to spill.
  BitVector Available = getRegsAvailable(RC);
  Available &= Candidates;
  if (Available.any())
    Candidates = Available;

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  unsigned SReg = findSurvivorReg(I, Candidates, 25, UseMI);

  // If we found an unused register there is no reason to spill it.
  if (!isRegUsed(SReg)) {
    DEBUG(dbgs() << "Scavenged register: " << TRI->getName(SReg) << "\n");
    return SReg;
  }

  // The survivor is live: spill it around [I, UseMI) and remember where it
  // gets restored so forward() can release the slot.
  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
  Scavenged.Restore = &*std::prev(UseMI);

  DEBUG(dbgs() << "Scavenged register (with spill): " << TRI->getName(SReg) <<
        "\n");

  return SReg;
}
573 
/// Backward-scavenging counterpart of scavengeRegister(): make a register of
/// class \p RC available for the range ending at \p To, spilling a survivor
/// if no register is free. \p RestoreAfter reserves the register past the
/// current instruction as well. Returns the scavenged register.
unsigned RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
                                                 MachineBasicBlock::iterator To,
                                                 bool RestoreAfter, int SPAdj) {
  const MachineBasicBlock &MBB = *To->getParent();
  const MachineFunction &MF = *MBB.getParent();

  // Find the register whose use is furthest away.
  MachineBasicBlock::iterator UseMI;
  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
                            RestoreAfter);
  MCPhysReg Reg = P.first;
  MachineBasicBlock::iterator SpillBefore = P.second;
  assert(Reg != 0 && "No register left to scavenge!");
  // Found an available register?
  // SpillBefore == MBB.end() means the register was free over the whole
  // range and needs no spill.
  if (SpillBefore != MBB.end()) {
    MachineBasicBlock::iterator ReloadAfter =
      RestoreAfter ? std::next(MBBI) : MBBI;
    MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
    // NOTE(review): if MBBI is the last instruction of the block,
    // ReloadBefore may equal MBB.end(); dereferencing it in the DEBUG output
    // below looks unsafe — confirm this case cannot be reached here.
    DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
    ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
    Scavenged.Restore = &*std::prev(SpillBefore);
    LiveUnits.removeReg(Reg);
    DEBUG(dbgs() << "Scavenged register with spill: " << PrintReg(Reg, TRI)
          << " until " << *SpillBefore);
  } else {
    DEBUG(dbgs() << "Scavenged free register: " << PrintReg(Reg, TRI) << '\n');
  }
  return Reg;
}
605 
/// Allocate a physical register for the virtual register \p VReg. The last
/// use of \p VReg is around the current position of the register scavenger
/// \p RS. \p ReserveAfter controls whether the scavenged register needs to
/// be reserved after the current instruction, otherwise it will only be
/// reserved before the current instruction. Returns the assigned register.
static unsigned scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
                             unsigned VReg, bool ReserveAfter) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
#ifndef NDEBUG
  // Verify that all definitions and uses are in the same basic block.
  const MachineBasicBlock *CommonMBB = nullptr;
  // Real definition for the reg, re-definitions are not considered.
  const MachineInstr *RealDef = nullptr;
  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
    MachineBasicBlock *MBB = MO.getParent()->getParent();
    if (CommonMBB == nullptr)
      CommonMBB = MBB;
    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
    if (MO.isDef()) {
      const MachineInstr &MI = *MO.getParent();
      if (!MI.readsRegister(VReg, &TRI)) {
        assert((!RealDef || RealDef == &MI) &&
               "Can have at most one definition which is not a redefinition");
        RealDef = &MI;
      }
    }
  }
  assert(RealDef != nullptr && "Must have at least 1 Def");
#endif

  // We should only have one definition of the register. However to accommodate
  // the requirements of two address code we also allow definitions in
  // subsequent instructions provided they also read the register. That way
  // we get a single contiguous lifetime.
  //
  // Definitions in MRI.def_begin() are unordered, search for the first.
  MachineRegisterInfo::def_iterator FirstDef =
    std::find_if(MRI.def_begin(VReg), MRI.def_end(),
                 [VReg, &TRI](const MachineOperand &MO) {
      return !MO.getParent()->readsRegister(VReg, &TRI);
    });
  assert(FirstDef != MRI.def_end() &&
         "Must have one definition that does not redefine vreg");
  MachineInstr &DefMI = *FirstDef->getParent();

  // The register scavenger will report a free register inserting an emergency
  // spill/reload if necessary.
  int SPAdj = 0;
  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
  unsigned SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
                                               ReserveAfter, SPAdj);
  // Rewrite every occurrence of VReg to the scavenged physical register.
  MRI.replaceRegWith(VReg, SReg);
  ++NumScavengedRegs;
  return SReg;
}
661 
/// Allocate (scavenge) vregs inside a single basic block, walking it
/// bottom-up with the scavenger in backward mode.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  // Vregs with an index >= InitialNumVirtRegs were created during this scan
  // (by target spill callbacks) and are handled in a later pass.
  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
            TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        // The scavenged register must stay reserved across *N, hence the
        // kill marker and re-marking it used.
        unsigned SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      unsigned Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!TargetRegisterInfo::isVirtualRegister(Reg) ||
          TargetRegisterInfo::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows use to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        unsigned SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  // The first instruction cannot have a vreg use: there would be no room to
  // place a reload before it.
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !TargetRegisterInfo::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
738 
/// Replace all remaining virtual registers in \p MF with scavenged physical
/// registers (block by block, at most two passes per block), then clear the
/// vreg state and mark the function NoVRegs.
void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
  // iterate over the vreg use list, which at this point only contains machine
  // operands for which eliminateFrameIndex need a new scratch reg.
  MachineRegisterInfo &MRI = MF.getRegInfo();
  // Shortcut.
  if (MRI.getNumVirtRegs() == 0) {
    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
    return;
  }

  // Run through the instructions and find any virtual registers.
  for (MachineBasicBlock &MBB : MF) {
    if (MBB.empty())
      continue;

    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
    if (Again) {
      DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
            << MBB.getName() << '\n');
      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
      // The target required a 2nd run (because it created new vregs while
      // spilling). Refuse to do another pass to keep compiletime in check.
      if (Again)
        report_fatal_error("Incomplete scavenging after 2nd pass");
    }
  }

  MRI.clearVirtRegs();
  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
}
770 
771 namespace {
772 /// This class runs register scavenging independ of the PrologEpilogInserter.
773 /// This is used in for testing.
774 class ScavengerTest : public MachineFunctionPass {
775 public:
776   static char ID;
777   ScavengerTest() : MachineFunctionPass(ID) {}
778   bool runOnMachineFunction(MachineFunction &MF) {
779     const TargetSubtargetInfo &STI = MF.getSubtarget();
780     const TargetFrameLowering &TFL = *STI.getFrameLowering();
781 
782     RegScavenger RS;
783     // Let's hope that calling those outside of PrologEpilogueInserter works
784     // well enough to initialize the scavenger with some emergency spillslots
785     // for the target.
786     BitVector SavedRegs;
787     TFL.determineCalleeSaves(MF, SavedRegs, &RS);
788     TFL.processFunctionBeforeFrameFinalized(MF, &RS);
789 
790     // Let's scavenge the current function
791     scavengeFrameVirtualRegs(MF, RS);
792     return true;
793   }
794 };
795 char ScavengerTest::ID;
796 
797 } // end anonymous namespace
798 
799 INITIALIZE_PASS(ScavengerTest, "scavenger-test",
800                 "Scavenge virtual registers inside basic blocks", false, false)
801