1 //===-- WebAssemblyRegStackify.cpp - Register Stackification --------------===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 ///
10 /// \file
11 /// \brief This file implements a register stacking pass.
12 ///
13 /// This pass reorders instructions to put register uses and defs in an order
14 /// such that they form single-use expression trees. Registers fitting this form
15 /// are then marked as "stackified", meaning references to them are replaced by
16 /// "push" and "pop" from the stack.
17 ///
18 /// This is primarily a code size optimization, since temporary values on the
19 /// expression don't need to be named.
20 ///
21 //===----------------------------------------------------------------------===//
22 
23 #include "WebAssembly.h"
24 #include "MCTargetDesc/WebAssemblyMCTargetDesc.h" // for WebAssembly::ARGUMENT_*
25 #include "WebAssemblyMachineFunctionInfo.h"
26 #include "WebAssemblySubtarget.h"
27 #include "llvm/Analysis/AliasAnalysis.h"
28 #include "llvm/CodeGen/LiveIntervalAnalysis.h"
29 #include "llvm/CodeGen/MachineBlockFrequencyInfo.h"
30 #include "llvm/CodeGen/MachineDominators.h"
31 #include "llvm/CodeGen/MachineInstrBuilder.h"
32 #include "llvm/CodeGen/MachineRegisterInfo.h"
33 #include "llvm/CodeGen/Passes.h"
34 #include "llvm/Support/Debug.h"
35 #include "llvm/Support/raw_ostream.h"
36 using namespace llvm;
37 
38 #define DEBUG_TYPE "wasm-reg-stackify"
39 
40 namespace {
class WebAssemblyRegStackify final : public MachineFunctionPass {
  const char *getPassName() const override {
    return "WebAssembly Register Stackify";
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // This pass only reorders instructions within blocks and rewrites
    // register operands; it never adds or removes control flow.
    AU.setPreservesCFG();
    // Required: alias analysis (memory-dependence checks in IsSafeToMove),
    // dominators (multi-use tee placement), and live intervals (moving and
    // cloning defs while keeping liveness consistent).
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<MachineDominatorTree>();
    AU.addRequired<LiveIntervals>();
    // All liveness/dominance information is updated in place as we go, so
    // these analyses survive the pass.
    AU.addPreserved<MachineBlockFrequencyInfo>();
    AU.addPreserved<SlotIndexes>();
    AU.addPreserved<LiveIntervals>();
    AU.addPreservedID(LiveVariablesID);
    AU.addPreserved<MachineDominatorTree>();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

public:
  static char ID; // Pass identification, replacement for typeid
  WebAssemblyRegStackify() : MachineFunctionPass(ID) {}
};
65 } // end anonymous namespace
66 
char WebAssemblyRegStackify::ID = 0;
/// Factory function; returns a new instance of the register stackification
/// pass for insertion into the target's codegen pipeline.
FunctionPass *llvm::createWebAssemblyRegStackify() {
  return new WebAssemblyRegStackify();
}
71 
72 // Decorate the given instruction with implicit operands that enforce the
73 // expression stack ordering constraints for an instruction which is on
74 // the expression stack.
75 static void ImposeStackOrdering(MachineInstr *MI) {
76   // Write the opaque EXPR_STACK register.
77   if (!MI->definesRegister(WebAssembly::EXPR_STACK))
78     MI->addOperand(MachineOperand::CreateReg(WebAssembly::EXPR_STACK,
79                                              /*isDef=*/true,
80                                              /*isImp=*/true));
81 
82   // Also read the opaque EXPR_STACK register.
83   if (!MI->readsRegister(WebAssembly::EXPR_STACK))
84     MI->addOperand(MachineOperand::CreateReg(WebAssembly::EXPR_STACK,
85                                              /*isDef=*/false,
86                                              /*isImp=*/true));
87 }
88 
89 // Test whether it's safe to move Def to just before Insert.
90 // TODO: Compute memory dependencies in a way that doesn't require always
91 // walking the block.
92 // TODO: Compute memory dependencies in a way that uses AliasAnalysis to be
93 // more precise.
static bool IsSafeToMove(const MachineInstr *Def, const MachineInstr *Insert,
                         AliasAnalysis &AA, const LiveIntervals &LIS,
                         const MachineRegisterInfo &MRI) {
  assert(Def->getParent() == Insert->getParent());
  bool SawStore = false, SawSideEffects = false;
  MachineBasicBlock::const_iterator D(Def), I(Insert);

  // Check for register dependencies: moving Def must not change the value
  // observed by any of its register operands, nor the value any intervening
  // instruction observes.
  for (const MachineOperand &MO : Def->operands()) {
    if (!MO.isReg() || MO.isUndef())
      continue;
    unsigned Reg = MO.getReg();

    // If the register is dead here and at Insert, ignore it.
    if (MO.isDead() && Insert->definesRegister(Reg) &&
        !Insert->readsRegister(Reg))
      continue;

    if (TargetRegisterInfo::isPhysicalRegister(Reg)) {
      // If the physical register is never modified, ignore it.
      if (!MRI.isPhysRegModified(Reg))
        continue;
      // Otherwise, it's a physical register with unknown liveness.
      return false;
    }

    // Ask LiveIntervals whether moving this virtual register use or def to
    // Insert will change which value numbers are seen. For a def, look at the
    // value live at Def's own reg slot; for a use, the value live just before
    // Def. If a different value number is live just before Insert, the move
    // would change observed values.
    const LiveInterval &LI = LIS.getInterval(Reg);
    VNInfo *DefVNI =
        MO.isDef() ? LI.getVNInfoAt(LIS.getInstructionIndex(*Def).getRegSlot())
                   : LI.getVNInfoBefore(LIS.getInstructionIndex(*Def));
    assert(DefVNI && "Instruction input missing value number");
    VNInfo *InsVNI = LI.getVNInfoBefore(LIS.getInstructionIndex(*Insert));
    if (InsVNI && DefVNI != InsVNI)
      return false;
  }

  // Treat calls conservatively as stores for the memory-dependence walk.
  SawStore = Def->isCall() || Def->mayStore();
  // Check for memory dependencies and side effects by walking every
  // instruction strictly between Def and Insert (exclusive on both ends).
  for (--I; I != D; --I)
    SawSideEffects |= !I->isSafeToMove(&AA, SawStore);
  // The move is unsafe if Def is a non-invariant load being carried past a
  // store, or if Def itself is unsafe to move past an intervening
  // side-effecting instruction.
  return !(SawStore && Def->mayLoad() && !Def->isInvariantLoad(&AA)) &&
         !(SawSideEffects && !Def->isSafeToMove(&AA, SawStore));
}
139 
140 /// Test whether OneUse, a use of Reg, dominates all of Reg's other uses.
141 static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
142                                      const MachineBasicBlock &MBB,
143                                      const MachineRegisterInfo &MRI,
144                                      const MachineDominatorTree &MDT) {
145   for (const MachineOperand &Use : MRI.use_operands(Reg)) {
146     if (&Use == &OneUse)
147       continue;
148     const MachineInstr *UseInst = Use.getParent();
149     const MachineInstr *OneUseInst = OneUse.getParent();
150     if (UseInst->getOpcode() == TargetOpcode::PHI) {
151       // Test that the PHI use, which happens on the CFG edge rather than
152       // within the PHI's own block, is dominated by the one selected use.
153       const MachineBasicBlock *Pred =
154           UseInst->getOperand(&Use - &UseInst->getOperand(0) + 1).getMBB();
155       if (!MDT.dominates(&MBB, Pred))
156         return false;
157     } else if (UseInst == OneUseInst) {
158       // Another use in the same instruction. We need to ensure that the one
159       // selected use happens "before" it.
160       if (&OneUse > &Use)
161         return false;
162     } else {
163       // Test that the use is dominated by the one selected use.
164       if (!MDT.dominates(OneUseInst, UseInst))
165         return false;
166     }
167   }
168   return true;
169 }
170 
171 /// Get the appropriate tee_local opcode for the given register class.
172 static unsigned GetTeeLocalOpcode(const TargetRegisterClass *RC) {
173   if (RC == &WebAssembly::I32RegClass)
174     return WebAssembly::TEE_LOCAL_I32;
175   if (RC == &WebAssembly::I64RegClass)
176     return WebAssembly::TEE_LOCAL_I64;
177   if (RC == &WebAssembly::F32RegClass)
178     return WebAssembly::TEE_LOCAL_F32;
179   if (RC == &WebAssembly::F64RegClass)
180     return WebAssembly::TEE_LOCAL_F64;
181   llvm_unreachable("Unexpected register class");
182 }
183 
184 /// A single-use def in the same block with no intervening memory or register
185 /// dependencies; move the def down and nest it with the current instruction.
186 static MachineInstr *MoveForSingleUse(unsigned Reg, MachineInstr *Def,
187                                       MachineBasicBlock &MBB,
188                                       MachineInstr *Insert, LiveIntervals &LIS,
189                                       WebAssemblyFunctionInfo &MFI) {
190   MBB.splice(Insert, &MBB, Def);
191   LIS.handleMove(*Def);
192   MFI.stackifyVReg(Reg);
193   ImposeStackOrdering(Def);
194   return Def;
195 }
196 
197 /// A trivially cloneable instruction; clone it and nest the new copy with the
198 /// current instruction.
static MachineInstr *
RematerializeCheapDef(unsigned Reg, MachineOperand &Op, MachineInstr *Def,
                      MachineBasicBlock &MBB, MachineInstr *Insert,
                      LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
                      MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII,
                      const WebAssemblyRegisterInfo *TRI) {
  // Clone Def into a fresh virtual register just before Insert, and point
  // the use being stackified (Op) at the clone.
  unsigned NewReg = MRI.createVirtualRegister(MRI.getRegClass(Reg));
  TII->reMaterialize(MBB, Insert, NewReg, 0, Def, *TRI);
  Op.setReg(NewReg);
  // reMaterialize inserted the clone immediately before Insert.
  MachineInstr *Clone = &*std::prev(MachineBasicBlock::instr_iterator(Insert));
  // Register the clone with LiveIntervals before computing its interval.
  LIS.InsertMachineInstrInMaps(*Clone);
  LIS.createAndComputeVirtRegInterval(NewReg);
  MFI.stackifyVReg(NewReg);
  ImposeStackOrdering(Clone);

  // If that was the last use of the original, delete the original.
  // Otherwise shrink the LiveInterval.
  if (MRI.use_empty(Reg)) {
    SlotIndex Idx = LIS.getInstructionIndex(*Def).getRegSlot();
    // Drop liveness bookkeeping at Def's slot before erasing it: any def of
    // the ARGUMENTS physical register there, then the vreg def itself.
    LIS.removePhysRegDefAt(WebAssembly::ARGUMENTS, Idx);
    LIS.removeVRegDefAt(LIS.getInterval(Reg), Idx);
    LIS.removeInterval(Reg);
    LIS.RemoveMachineInstrFromMaps(*Def);
    Def->eraseFromParent();
  } else {
    LIS.shrinkToUses(&LIS.getInterval(Reg));
  }
  return Clone;
}
228 
229 /// A multiple-use def in the same block with no intervening memory or register
230 /// dependencies; move the def down, nest it with the current instruction, and
231 /// insert a tee_local to satisfy the rest of the uses. As an illustration,
232 /// rewrite this:
233 ///
234 ///    Reg = INST ...        // Def
235 ///    INST ..., Reg, ...    // Insert
236 ///    INST ..., Reg, ...
237 ///    INST ..., Reg, ...
238 ///
239 /// to this:
240 ///
241 ///    DefReg = INST ...     // Def (to become the new Insert)
242 ///    TeeReg, NewReg = TEE_LOCAL_... DefReg
243 ///    INST ..., TeeReg, ... // Insert
244 ///    INST ..., NewReg, ...
245 ///    INST ..., NewReg, ...
246 ///
247 /// with DefReg and TeeReg stackified. This eliminates a get_local from the
248 /// resulting code.
static MachineInstr *MoveAndTeeForMultiUse(
    unsigned Reg, MachineOperand &Op, MachineInstr *Def, MachineBasicBlock &MBB,
    MachineInstr *Insert, LiveIntervals &LIS, WebAssemblyFunctionInfo &MFI,
    MachineRegisterInfo &MRI, const WebAssemblyInstrInfo *TII) {
  // Move Def down to just before Insert and update LiveIntervals.
  MBB.splice(Insert, &MBB, Def);
  LIS.handleMove(*Def);
  // Split the single register Reg into three: DefReg (Def's new result,
  // stackified into the tee), TeeReg (the tee's stackified result, consumed
  // by Op), and NewReg (the tee's local result, taken by all other uses).
  const auto *RegClass = MRI.getRegClass(Reg);
  unsigned NewReg = MRI.createVirtualRegister(RegClass);
  unsigned TeeReg = MRI.createVirtualRegister(RegClass);
  unsigned DefReg = MRI.createVirtualRegister(RegClass);
  // Redirect all of Reg's uses to NewReg first; the selected use and Def's
  // result are then re-pointed individually below.
  MRI.replaceRegWith(Reg, NewReg);
  MachineInstr *Tee = BuildMI(MBB, Insert, Insert->getDebugLoc(),
                              TII->get(GetTeeLocalOpcode(RegClass)), TeeReg)
                          .addReg(NewReg, RegState::Define)
                          .addReg(DefReg);
  Op.setReg(TeeReg);
  Def->getOperand(0).setReg(DefReg);
  // Rebuild liveness: index the tee, retire the old interval for Reg, and
  // compute fresh intervals for the three new registers.
  LIS.InsertMachineInstrInMaps(*Tee);
  LIS.removeInterval(Reg);
  LIS.createAndComputeVirtRegInterval(NewReg);
  LIS.createAndComputeVirtRegInterval(TeeReg);
  LIS.createAndComputeVirtRegInterval(DefReg);
  // DefReg and TeeReg live on the expression stack; NewReg stays a local.
  MFI.stackifyVReg(DefReg);
  MFI.stackifyVReg(TeeReg);
  ImposeStackOrdering(Def);
  ImposeStackOrdering(Tee);
  return Def;
}
277 
278 namespace {
279 /// A stack for walking the tree of instructions being built, visiting the
280 /// MachineOperands in DFS order.
class TreeWalkerState {
  typedef MachineInstr::mop_iterator mop_iterator;
  typedef std::reverse_iterator<mop_iterator> mop_reverse_iterator;
  typedef iterator_range<mop_reverse_iterator> RangeTy;
  // Each entry is the not-yet-visited suffix of one instruction's explicit
  // uses, iterated in reverse operand order (LIFO relative to the stack).
  SmallVector<RangeTy, 4> Worklist;

public:
  explicit TreeWalkerState(MachineInstr *Insert) {
    const iterator_range<mop_iterator> &Range = Insert->explicit_uses();
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  bool Done() const { return Worklist.empty(); }

  // Return the next operand to visit and advance past it, dropping the
  // range from the worklist if it becomes empty.
  MachineOperand &Pop() {
    RangeTy &Range = Worklist.back();
    MachineOperand &Op = *Range.begin();
    Range = drop_begin(Range, 1);
    if (Range.begin() == Range.end())
      Worklist.pop_back();
    assert((Worklist.empty() ||
            Worklist.back().begin() != Worklist.back().end()) &&
           "Empty ranges shouldn't remain in the worklist");
    return Op;
  }

  /// Push Instr's operands onto the stack to be visited.
  void PushOperands(MachineInstr *Instr) {
    const iterator_range<mop_iterator> &Range(Instr->explicit_uses());
    if (Range.begin() != Range.end())
      Worklist.push_back(reverse(Range));
  }

  /// Some of Instr's operands are on the top of the stack; remove them and
  /// re-insert them starting from the beginning (because we've commuted them).
  void ResetTopOperands(MachineInstr *Instr) {
    assert(HasRemainingOperands(Instr) &&
           "Reseting operands should only be done when the instruction has "
           "an operand still on the stack");
    Worklist.back() = reverse(Instr->explicit_uses());
  }

  /// Test whether Instr has operands remaining to be visited at the top of
  /// the stack.
  bool HasRemainingOperands(const MachineInstr *Instr) const {
    if (Worklist.empty())
      return false;
    const RangeTy &Range = Worklist.back();
    return Range.begin() != Range.end() && Range.begin()->getParent() == Instr;
  }

  /// Test whether the given register is present on the stack, indicating an
  /// operand in the tree that we haven't visited yet. Moving a definition of
  /// Reg to a point in the tree after that would change its value.
  bool IsOnStack(unsigned Reg) const {
    // Linear scan over all pending ranges; the worklist is small (the depth
    // of the expression tree being built).
    for (const RangeTy &Range : Worklist)
      for (const MachineOperand &MO : Range)
        if (MO.isReg() && MO.getReg() == Reg)
          return true;
    return false;
  }
};
344 
345 /// State to keep track of whether commuting is in flight or whether it's been
346 /// tried for the current instruction and didn't work.
347 class CommutingState {
348   /// There are effectively three states: the initial state where we haven't
349   /// started commuting anything and we don't know anything yet, the tenative
350   /// state where we've commuted the operands of the current instruction and are
351   /// revisting it, and the declined state where we've reverted the operands
352   /// back to their original order and will no longer commute it further.
353   bool TentativelyCommuting;
354   bool Declined;
355 
356   /// During the tentative state, these hold the operand indices of the commuted
357   /// operands.
358   unsigned Operand0, Operand1;
359 
360 public:
361   CommutingState() : TentativelyCommuting(false), Declined(false) {}
362 
363   /// Stackification for an operand was not successful due to ordering
364   /// constraints. If possible, and if we haven't already tried it and declined
365   /// it, commute Insert's operands and prepare to revisit it.
366   void MaybeCommute(MachineInstr *Insert, TreeWalkerState &TreeWalker,
367                     const WebAssemblyInstrInfo *TII) {
368     if (TentativelyCommuting) {
369       assert(!Declined &&
370              "Don't decline commuting until you've finished trying it");
371       // Commuting didn't help. Revert it.
372       TII->commuteInstruction(Insert, /*NewMI=*/false, Operand0, Operand1);
373       TentativelyCommuting = false;
374       Declined = true;
375     } else if (!Declined && TreeWalker.HasRemainingOperands(Insert)) {
376       Operand0 = TargetInstrInfo::CommuteAnyOperandIndex;
377       Operand1 = TargetInstrInfo::CommuteAnyOperandIndex;
378       if (TII->findCommutedOpIndices(Insert, Operand0, Operand1)) {
379         // Tentatively commute the operands and try again.
380         TII->commuteInstruction(Insert, /*NewMI=*/false, Operand0, Operand1);
381         TreeWalker.ResetTopOperands(Insert);
382         TentativelyCommuting = true;
383         Declined = false;
384       }
385     }
386   }
387 
388   /// Stackification for some operand was successful. Reset to the default
389   /// state.
390   void Reset() {
391     TentativelyCommuting = false;
392     Declined = false;
393   }
394 };
395 } // end anonymous namespace
396 
bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
  DEBUG(dbgs() << "********** Register Stackifying **********\n"
                  "********** Function: "
               << MF.getName() << '\n');

  bool Changed = false;
  MachineRegisterInfo &MRI = MF.getRegInfo();
  WebAssemblyFunctionInfo &MFI = *MF.getInfo<WebAssemblyFunctionInfo>();
  const auto *TII = MF.getSubtarget<WebAssemblySubtarget>().getInstrInfo();
  const auto *TRI = MF.getSubtarget<WebAssemblySubtarget>().getRegisterInfo();
  AliasAnalysis &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  MachineDominatorTree &MDT = getAnalysis<MachineDominatorTree>();
  LiveIntervals &LIS = getAnalysis<LiveIntervals>();

  // Walk the instructions from the bottom up. Currently we don't look past
  // block boundaries, and the blocks aren't ordered so the block visitation
  // order isn't significant, but we may want to change this in the future.
  for (MachineBasicBlock &MBB : MF) {
    // Don't use a range-based for loop, because we modify the list as we're
    // iterating over it and the end iterator may change.
    for (auto MII = MBB.rbegin(); MII != MBB.rend(); ++MII) {
      MachineInstr *Insert = &*MII;
      // Don't nest anything inside a phi. PHIs are grouped at the top of the
      // block, so once we hit one there is nothing above it to stackify.
      if (Insert->getOpcode() == TargetOpcode::PHI)
        break;

      // Don't nest anything inside an inline asm, because we don't have
      // constraints for $push inputs.
      if (Insert->getOpcode() == TargetOpcode::INLINEASM)
        continue;

      // Ignore debugging intrinsics.
      if (Insert->getOpcode() == TargetOpcode::DBG_VALUE)
        continue;

      // Iterate through the inputs in reverse order, since we'll be pulling
      // operands off the stack in LIFO order.
      CommutingState Commuting;
      TreeWalkerState TreeWalker(Insert);
      while (!TreeWalker.Done()) {
        MachineOperand &Op = TreeWalker.Pop();

        // We're only interested in explicit virtual register operands.
        if (!Op.isReg())
          continue;

        unsigned Reg = Op.getReg();
        assert(Op.isUse() && "explicit_uses() should only iterate over uses");
        assert(!Op.isImplicit() &&
               "explicit_uses() should only iterate over explicit operands");
        if (TargetRegisterInfo::isPhysicalRegister(Reg))
          continue;

        // Identify the definition for this register at this point. Most
        // registers are in SSA form here so we try a quick MRI query first.
        MachineInstr *Def = MRI.getUniqueVRegDef(Reg);
        if (!Def) {
          // MRI doesn't know what the Def is. Try asking LIS for the value
          // number reaching Insert and mapping it back to an instruction.
          const VNInfo *ValNo = LIS.getInterval(Reg).getVNInfoBefore(
              LIS.getInstructionIndex(*Insert));
          if (!ValNo)
            continue;
          Def = LIS.getInstructionFromIndex(ValNo->def);
          if (!Def)
            continue;
        }

        // Don't nest an INLINE_ASM def into anything, because we don't have
        // constraints for $pop outputs.
        if (Def->getOpcode() == TargetOpcode::INLINEASM)
          continue;

        // Don't nest PHIs inside of anything.
        if (Def->getOpcode() == TargetOpcode::PHI)
          continue;

        // Argument instructions represent live-in registers and not real
        // instructions.
        if (Def->getOpcode() == WebAssembly::ARGUMENT_I32 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_I64 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_F32 ||
            Def->getOpcode() == WebAssembly::ARGUMENT_F64)
          continue;

        // Decide which strategy to take. Prefer to move a single-use value
        // over cloning it, and prefer cloning over introducing a tee_local.
        // For moving, we require the def to be in the same block as the use;
        // this makes things simpler (LiveIntervals' handleMove function only
        // supports intra-block moves) and it's MachineSink's job to catch all
        // the sinking opportunities anyway.
        bool SameBlock = Def->getParent() == &MBB;
        bool CanMove = SameBlock && IsSafeToMove(Def, Insert, AA, LIS, MRI) &&
                       !TreeWalker.IsOnStack(Reg);
        if (CanMove && MRI.hasOneUse(Reg)) {
          Insert = MoveForSingleUse(Reg, Def, MBB, Insert, LIS, MFI);
        } else if (Def->isAsCheapAsAMove() &&
                   TII->isTriviallyReMaterializable(Def, &AA)) {
          Insert = RematerializeCheapDef(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII, TRI);
        } else if (CanMove &&
                   OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT)) {
          Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
                                         MRI, TII);
        } else {
          // We failed to stackify the operand. If the problem was ordering
          // constraints, Commuting may be able to help.
          if (!CanMove && SameBlock)
            Commuting.MaybeCommute(Insert, TreeWalker, TII);
          // Proceed to the next operand.
          continue;
        }

        // We stackified an operand. Add the defining instruction's operands to
        // the worklist stack now to continue to build an ever deeper tree.
        Commuting.Reset();
        TreeWalker.PushOperands(Insert);
      }

      // If we stackified any operands, skip over the tree to start looking for
      // the next instruction we can build a tree on. Repoint the reverse
      // iterator at the deepest stackified instruction (Insert), so the
      // loop's ++MII continues with the instruction just above the tree.
      if (Insert != &*MII) {
        ImposeStackOrdering(&*MII);
        MII = std::prev(
            make_reverse_iterator(MachineBasicBlock::iterator(Insert)));
        Changed = true;
      }
    }
  }

  // If we used EXPR_STACK anywhere, add it to the live-in sets everywhere so
  // that it never looks like a use-before-def.
  if (Changed) {
    MF.getRegInfo().addLiveIn(WebAssembly::EXPR_STACK);
    for (MachineBasicBlock &MBB : MF)
      MBB.addLiveIn(WebAssembly::EXPR_STACK);
  }

#ifndef NDEBUG
  // Verify that pushes and pops are performed in LIFO order.
  SmallVector<unsigned, 0> Stack;
  for (MachineBasicBlock &MBB : MF) {
    for (MachineInstr &MI : MBB) {
      // Walk operands in reverse so that defs (pushes) of one instruction
      // are seen before the uses (pops) of the next.
      for (MachineOperand &MO : reverse(MI.explicit_operands())) {
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();

        // Don't stackify physregs like SP or FP.
        if (!TargetRegisterInfo::isVirtualRegister(Reg))
          continue;

        if (MFI.isVRegStackified(Reg)) {
          if (MO.isDef())
            Stack.push_back(Reg);
          else
            assert(Stack.pop_back_val() == Reg &&
                   "Register stack pop should be paired with a push");
        }
      }
    }
    // TODO: Generalize this code to support keeping values on the stack across
    // basic block boundaries.
    assert(Stack.empty() &&
           "Register stack pushes and pops should be balanced");
  }
#endif

  return Changed;
}
566