1*0b57cec5SDimitry Andric //===----- SchedulePostRAList.cpp - list scheduler ------------------------===//
2*0b57cec5SDimitry Andric //
3*0b57cec5SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4*0b57cec5SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
5*0b57cec5SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6*0b57cec5SDimitry Andric //
7*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
8*0b57cec5SDimitry Andric //
9*0b57cec5SDimitry Andric // This implements a top-down list scheduler, using standard algorithms.
10*0b57cec5SDimitry Andric // The basic approach uses a priority queue of available nodes to schedule.
11*0b57cec5SDimitry Andric // One at a time, nodes are taken from the priority queue (thus in priority
12*0b57cec5SDimitry Andric // order), checked for legality to schedule, and emitted if legal.
13*0b57cec5SDimitry Andric //
14*0b57cec5SDimitry Andric // Nodes may not be legal to schedule either due to structural hazards (e.g.
15*0b57cec5SDimitry Andric // pipeline or resource constraints) or because an input to the instruction has
16*0b57cec5SDimitry Andric // not completed execution.
17*0b57cec5SDimitry Andric //
18*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
19*0b57cec5SDimitry Andric 
20*0b57cec5SDimitry Andric #include "llvm/ADT/Statistic.h"
21*0b57cec5SDimitry Andric #include "llvm/Analysis/AliasAnalysis.h"
22*0b57cec5SDimitry Andric #include "llvm/CodeGen/AntiDepBreaker.h"
23*0b57cec5SDimitry Andric #include "llvm/CodeGen/LatencyPriorityQueue.h"
24*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineDominators.h"
25*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineFunctionPass.h"
26*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineLoopInfo.h"
27*0b57cec5SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h"
28*0b57cec5SDimitry Andric #include "llvm/CodeGen/Passes.h"
29*0b57cec5SDimitry Andric #include "llvm/CodeGen/RegisterClassInfo.h"
30*0b57cec5SDimitry Andric #include "llvm/CodeGen/ScheduleDAGInstrs.h"
31*0b57cec5SDimitry Andric #include "llvm/CodeGen/ScheduleHazardRecognizer.h"
32*0b57cec5SDimitry Andric #include "llvm/CodeGen/SchedulerRegistry.h"
33*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
34*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetLowering.h"
35*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetPassConfig.h"
36*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h"
37*0b57cec5SDimitry Andric #include "llvm/CodeGen/TargetSubtargetInfo.h"
38*0b57cec5SDimitry Andric #include "llvm/Config/llvm-config.h"
39*0b57cec5SDimitry Andric #include "llvm/InitializePasses.h"
40*0b57cec5SDimitry Andric #include "llvm/Support/CommandLine.h"
41*0b57cec5SDimitry Andric #include "llvm/Support/Debug.h"
42*0b57cec5SDimitry Andric #include "llvm/Support/ErrorHandling.h"
43*0b57cec5SDimitry Andric #include "llvm/Support/raw_ostream.h"
44*0b57cec5SDimitry Andric using namespace llvm;
45*0b57cec5SDimitry Andric 
46*0b57cec5SDimitry Andric #define DEBUG_TYPE "post-RA-sched"
47*0b57cec5SDimitry Andric 
// Counters reported via -stats.
STATISTIC(NumNoops, "Number of noops inserted");
STATISTIC(NumStalls, "Number of pipeline stalls");
STATISTIC(NumFixedAnti, "Number of fixed anti-dependencies");

// Post-RA scheduling is enabled with
// TargetSubtargetInfo.enablePostRAScheduler(). This flag can be used to
// override the target.
static cl::opt<bool>
EnablePostRAScheduler("post-RA-scheduler",
                       cl::desc("Enable scheduling after register allocation"),
                       cl::init(false), cl::Hidden);
// Overrides the target's anti-dependence breaking mode when given on the
// command line; any value other than "all" or "critical" selects "none".
static cl::opt<std::string>
EnableAntiDepBreaking("break-anti-dependencies",
                      cl::desc("Break post-RA scheduling anti-dependencies: "
                               "\"critical\", \"all\", or \"none\""),
                      cl::init("none"), cl::Hidden);

// If DebugDiv > 0 then only schedule MBB with (ID % DebugDiv) == DebugMod
// (only honored in !NDEBUG builds; see runOnMachineFunction).
static cl::opt<int>
DebugDiv("postra-sched-debugdiv",
                      cl::desc("Debug control MBBs that are scheduled"),
                      cl::init(0), cl::Hidden);
static cl::opt<int>
DebugMod("postra-sched-debugmod",
                      cl::desc("Debug control MBBs that are scheduled"),
                      cl::init(0), cl::Hidden);
74*0b57cec5SDimitry Andric 
~AntiDepBreaker()75*0b57cec5SDimitry Andric AntiDepBreaker::~AntiDepBreaker() { }
76*0b57cec5SDimitry Andric 
namespace {
  /// Pass wrapper that drives post-RA top-down list scheduling over each
  /// machine function; the actual scheduling is done by SchedulePostRATDList.
  class PostRAScheduler : public MachineFunctionPass {
    /// Target instruction info; cached per function in runOnMachineFunction.
    const TargetInstrInfo *TII = nullptr;
    /// Register class info, recomputed per function and passed to the
    /// anti-dependence breakers.
    RegisterClassInfo RegClassInfo;

  public:
    static char ID;
    PostRAScheduler() : MachineFunctionPass(ID) {}

    void getAnalysisUsage(AnalysisUsage &AU) const override {
      AU.setPreservesCFG();
      AU.addRequired<AAResultsWrapperPass>();
      AU.addRequired<TargetPassConfig>();
      AU.addRequired<MachineDominatorTree>();
      AU.addPreserved<MachineDominatorTree>();
      AU.addRequired<MachineLoopInfo>();
      AU.addPreserved<MachineLoopInfo>();
      MachineFunctionPass::getAnalysisUsage(AU);
    }

    /// Running after register allocation requires that all virtual registers
    /// have already been rewritten to physical registers.
    MachineFunctionProperties getRequiredProperties() const override {
      return MachineFunctionProperties().set(
          MachineFunctionProperties::Property::NoVRegs);
    }

    bool runOnMachineFunction(MachineFunction &Fn) override;

  private:
    /// Determine whether post-RA scheduling should run for subtarget ST at
    /// OptLevel, and report the subtarget's requested anti-dependence mode
    /// and critical-path register classes through the out-parameters.
    bool enablePostRAScheduler(
        const TargetSubtargetInfo &ST, CodeGenOpt::Level OptLevel,
        TargetSubtargetInfo::AntiDepBreakMode &Mode,
        TargetSubtargetInfo::RegClassVector &CriticalPathRCs) const;
  };
  char PostRAScheduler::ID = 0;

  /// The scheduler itself: top-down list scheduling of ScheduleDAGInstrs
  /// regions, with optional anti-dependence breaking and noop insertion.
  class SchedulePostRATDList : public ScheduleDAGInstrs {
    /// AvailableQueue - The priority queue to use for the available SUnits.
    ///
    LatencyPriorityQueue AvailableQueue;

    /// PendingQueue - This contains all of the instructions whose operands have
    /// been issued, but their results are not ready yet (due to the latency of
    /// the operation).  Once the operands becomes available, the instruction is
    /// added to the AvailableQueue.
    std::vector<SUnit*> PendingQueue;

    /// HazardRec - The hazard recognizer to use.
    ScheduleHazardRecognizer *HazardRec;

    /// AntiDepBreak - Anti-dependence breaking object, or NULL if none.
    /// Owned by this scheduler and destroyed in the destructor.
    AntiDepBreaker *AntiDepBreak;

    /// AA - AliasAnalysis for making memory reference queries.
    AliasAnalysis *AA;

    /// The schedule. Null SUnit*'s represent noop instructions.
    std::vector<SUnit*> Sequence;

    /// Ordered list of DAG postprocessing steps.
    std::vector<std::unique_ptr<ScheduleDAGMutation>> Mutations;

    /// The index in BB of RegionEnd.
    ///
    /// This is the instruction number from the top of the current block, not
    /// the SlotIndex. It is only used by the AntiDepBreaker.
    unsigned EndIndex;

  public:
    SchedulePostRATDList(
        MachineFunction &MF, MachineLoopInfo &MLI, AliasAnalysis *AA,
        const RegisterClassInfo &,
        TargetSubtargetInfo::AntiDepBreakMode AntiDepMode,
        SmallVectorImpl<const TargetRegisterClass *> &CriticalPathRCs);

    ~SchedulePostRATDList() override;

    /// startBlock - Initialize register live-range state for scheduling in
    /// this block.
    ///
    void startBlock(MachineBasicBlock *BB) override;

    // Set the index of RegionEnd within the current BB.
    void setEndIndex(unsigned EndIdx) { EndIndex = EndIdx; }

    /// Initialize the scheduler state for the next scheduling region.
    void enterRegion(MachineBasicBlock *bb,
                     MachineBasicBlock::iterator begin,
                     MachineBasicBlock::iterator end,
                     unsigned regioninstrs) override;

    /// Notify that the scheduler has finished scheduling the current region.
    void exitRegion() override;

    /// Schedule - Schedule the instruction range using list scheduling.
    ///
    void schedule() override;

    void EmitSchedule();

    /// Observe - Update liveness information to account for the current
    /// instruction, which will not be scheduled.
    ///
    void Observe(MachineInstr &MI, unsigned Count);

    /// finishBlock - Clean up register live-range state.
    ///
    void finishBlock() override;

  private:
    /// Apply each ScheduleDAGMutation step in order.
    void postprocessDAG();

    void ReleaseSucc(SUnit *SU, SDep *SuccEdge);
    void ReleaseSuccessors(SUnit *SU);
    void ScheduleNodeTopDown(SUnit *SU, unsigned CurCycle);
    void ListScheduleTopDown();

    /// Dump the scheduled Sequence (debug builds only).
    void dumpSchedule() const;
    /// Add a noop to the schedule and notify the hazard recognizer.
    void emitNoop(unsigned CurCycle);
  };
}
198*0b57cec5SDimitry Andric 
// Expose the pass's identity so other code can refer to it by ID.
char &llvm::PostRASchedulerID = PostRAScheduler::ID;

INITIALIZE_PASS(PostRAScheduler, DEBUG_TYPE,
                "Post RA top-down list latency scheduler", false, false)
203*0b57cec5SDimitry Andric 
SchedulePostRATDList(MachineFunction & MF,MachineLoopInfo & MLI,AliasAnalysis * AA,const RegisterClassInfo & RCI,TargetSubtargetInfo::AntiDepBreakMode AntiDepMode,SmallVectorImpl<const TargetRegisterClass * > & CriticalPathRCs)204*0b57cec5SDimitry Andric SchedulePostRATDList::SchedulePostRATDList(
205*0b57cec5SDimitry Andric     MachineFunction &MF, MachineLoopInfo &MLI, AliasAnalysis *AA,
206*0b57cec5SDimitry Andric     const RegisterClassInfo &RCI,
207*0b57cec5SDimitry Andric     TargetSubtargetInfo::AntiDepBreakMode AntiDepMode,
208*0b57cec5SDimitry Andric     SmallVectorImpl<const TargetRegisterClass *> &CriticalPathRCs)
209*0b57cec5SDimitry Andric     : ScheduleDAGInstrs(MF, &MLI), AA(AA), EndIndex(0) {
210*0b57cec5SDimitry Andric 
211*0b57cec5SDimitry Andric   const InstrItineraryData *InstrItins =
212*0b57cec5SDimitry Andric       MF.getSubtarget().getInstrItineraryData();
213*0b57cec5SDimitry Andric   HazardRec =
214*0b57cec5SDimitry Andric       MF.getSubtarget().getInstrInfo()->CreateTargetPostRAHazardRecognizer(
215*0b57cec5SDimitry Andric           InstrItins, this);
216*0b57cec5SDimitry Andric   MF.getSubtarget().getPostRAMutations(Mutations);
217*0b57cec5SDimitry Andric 
218*0b57cec5SDimitry Andric   assert((AntiDepMode == TargetSubtargetInfo::ANTIDEP_NONE ||
219*0b57cec5SDimitry Andric           MRI.tracksLiveness()) &&
220*0b57cec5SDimitry Andric          "Live-ins must be accurate for anti-dependency breaking");
221*0b57cec5SDimitry Andric   AntiDepBreak = ((AntiDepMode == TargetSubtargetInfo::ANTIDEP_ALL)
222*0b57cec5SDimitry Andric                       ? createAggressiveAntiDepBreaker(MF, RCI, CriticalPathRCs)
223*0b57cec5SDimitry Andric                       : ((AntiDepMode == TargetSubtargetInfo::ANTIDEP_CRITICAL)
224*0b57cec5SDimitry Andric                              ? createCriticalAntiDepBreaker(MF, RCI)
225*0b57cec5SDimitry Andric                              : nullptr));
226*0b57cec5SDimitry Andric }
227*0b57cec5SDimitry Andric 
~SchedulePostRATDList()228*0b57cec5SDimitry Andric SchedulePostRATDList::~SchedulePostRATDList() {
229*0b57cec5SDimitry Andric   delete HazardRec;
230*0b57cec5SDimitry Andric   delete AntiDepBreak;
231*0b57cec5SDimitry Andric }
232*0b57cec5SDimitry Andric 
233*0b57cec5SDimitry Andric /// Initialize state associated with the next scheduling region.
enterRegion(MachineBasicBlock * bb,MachineBasicBlock::iterator begin,MachineBasicBlock::iterator end,unsigned regioninstrs)234*0b57cec5SDimitry Andric void SchedulePostRATDList::enterRegion(MachineBasicBlock *bb,
235*0b57cec5SDimitry Andric                  MachineBasicBlock::iterator begin,
236*0b57cec5SDimitry Andric                  MachineBasicBlock::iterator end,
237*0b57cec5SDimitry Andric                  unsigned regioninstrs) {
238*0b57cec5SDimitry Andric   ScheduleDAGInstrs::enterRegion(bb, begin, end, regioninstrs);
239*0b57cec5SDimitry Andric   Sequence.clear();
240*0b57cec5SDimitry Andric }
241*0b57cec5SDimitry Andric 
/// Print the schedule before exiting the region.
/// The dump only happens in builds where LLVM_DEBUG is live; the base-class
/// call always runs and tears down the region state.
void SchedulePostRATDList::exitRegion() {
  LLVM_DEBUG({
    dbgs() << "*** Final schedule ***\n";
    dumpSchedule();
    dbgs() << '\n';
  });
  ScheduleDAGInstrs::exitRegion();
}
251*0b57cec5SDimitry Andric 
252*0b57cec5SDimitry Andric #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
253*0b57cec5SDimitry Andric /// dumpSchedule - dump the scheduled Sequence.
dumpSchedule() const254*0b57cec5SDimitry Andric LLVM_DUMP_METHOD void SchedulePostRATDList::dumpSchedule() const {
255*0b57cec5SDimitry Andric   for (unsigned i = 0, e = Sequence.size(); i != e; i++) {
256*0b57cec5SDimitry Andric     if (SUnit *SU = Sequence[i])
257*0b57cec5SDimitry Andric       dumpNode(*SU);
258*0b57cec5SDimitry Andric     else
259*0b57cec5SDimitry Andric       dbgs() << "**** NOOP ****\n";
260*0b57cec5SDimitry Andric   }
261*0b57cec5SDimitry Andric }
262*0b57cec5SDimitry Andric #endif
263*0b57cec5SDimitry Andric 
enablePostRAScheduler(const TargetSubtargetInfo & ST,CodeGenOpt::Level OptLevel,TargetSubtargetInfo::AntiDepBreakMode & Mode,TargetSubtargetInfo::RegClassVector & CriticalPathRCs) const264*0b57cec5SDimitry Andric bool PostRAScheduler::enablePostRAScheduler(
265*0b57cec5SDimitry Andric     const TargetSubtargetInfo &ST,
266*0b57cec5SDimitry Andric     CodeGenOpt::Level OptLevel,
267*0b57cec5SDimitry Andric     TargetSubtargetInfo::AntiDepBreakMode &Mode,
268*0b57cec5SDimitry Andric     TargetSubtargetInfo::RegClassVector &CriticalPathRCs) const {
269*0b57cec5SDimitry Andric   Mode = ST.getAntiDepBreakMode();
270*0b57cec5SDimitry Andric   ST.getCriticalPathRCs(CriticalPathRCs);
271*0b57cec5SDimitry Andric 
272*0b57cec5SDimitry Andric   // Check for explicit enable/disable of post-ra scheduling.
273*0b57cec5SDimitry Andric   if (EnablePostRAScheduler.getPosition() > 0)
274*0b57cec5SDimitry Andric     return EnablePostRAScheduler;
275*0b57cec5SDimitry Andric 
276*0b57cec5SDimitry Andric   return ST.enablePostRAScheduler() &&
277*0b57cec5SDimitry Andric          OptLevel >= ST.getOptLevelToEnablePostRAScheduler();
278*0b57cec5SDimitry Andric }
279*0b57cec5SDimitry Andric 
/// Run post-RA scheduling over every basic block of Fn.
///
/// Each block is walked bottom-up and carved into scheduling regions at
/// calls and target scheduling boundaries; each region is scheduled and
/// re-emitted independently. Returns false only when the pass does not run
/// at all (skipped function or scheduling disabled for the target).
bool PostRAScheduler::runOnMachineFunction(MachineFunction &Fn) {
  if (skipFunction(Fn.getFunction()))
    return false;

  TII = Fn.getSubtarget().getInstrInfo();
  MachineLoopInfo &MLI = getAnalysis<MachineLoopInfo>();
  AliasAnalysis *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  TargetPassConfig *PassConfig = &getAnalysis<TargetPassConfig>();

  RegClassInfo.runOnMachineFunction(Fn);

  TargetSubtargetInfo::AntiDepBreakMode AntiDepMode =
    TargetSubtargetInfo::ANTIDEP_NONE;
  SmallVector<const TargetRegisterClass*, 4> CriticalPathRCs;

  // Check that post-RA scheduling is enabled for this target.
  // This may upgrade the AntiDepMode.
  if (!enablePostRAScheduler(Fn.getSubtarget(), PassConfig->getOptLevel(),
                             AntiDepMode, CriticalPathRCs))
    return false;

  // Check for antidep breaking override: the -break-anti-dependencies flag
  // replaces the target's choice; unrecognized strings mean "none".
  if (EnableAntiDepBreaking.getPosition() > 0) {
    AntiDepMode = (EnableAntiDepBreaking == "all")
      ? TargetSubtargetInfo::ANTIDEP_ALL
      : ((EnableAntiDepBreaking == "critical")
         ? TargetSubtargetInfo::ANTIDEP_CRITICAL
         : TargetSubtargetInfo::ANTIDEP_NONE);
  }

  LLVM_DEBUG(dbgs() << "PostRAScheduler\n");

  SchedulePostRATDList Scheduler(Fn, MLI, AA, RegClassInfo, AntiDepMode,
                                 CriticalPathRCs);

  // Loop over all of the basic blocks
  for (auto &MBB : Fn) {
#ifndef NDEBUG
    // If DebugDiv > 0 then only schedule MBB with (ID % DebugDiv) == DebugMod
    if (DebugDiv > 0) {
      static int bbcnt = 0;
      if (bbcnt++ % DebugDiv != DebugMod)
        continue;
      dbgs() << "*** DEBUG scheduling " << Fn.getName() << ":"
             << printMBBReference(MBB) << " ***\n";
    }
#endif

    // Initialize register live-range state for scheduling in this block.
    Scheduler.startBlock(&MBB);

    // Schedule each sequence of instructions not interrupted by a label
    // or anything else that effectively needs to shut down scheduling.
    // Count is the from-the-top index of the instruction about to be
    // visited; CurrentCount is the index of the current region's end.
    MachineBasicBlock::iterator Current = MBB.end();
    unsigned Count = MBB.size(), CurrentCount = Count;
    for (MachineBasicBlock::iterator I = Current; I != MBB.begin();) {
      MachineInstr &MI = *std::prev(I);
      --Count;
      // Calls are not scheduling boundaries before register allocation, but
      // post-ra we don't gain anything by scheduling across calls since we
      // don't need to worry about register pressure.
      if (MI.isCall() || TII->isSchedulingBoundary(MI, &MBB, Fn)) {
        // Schedule the region below the boundary, i.e. (I, Current).
        Scheduler.enterRegion(&MBB, I, Current, CurrentCount - Count);
        Scheduler.setEndIndex(CurrentCount);
        Scheduler.schedule();
        Scheduler.exitRegion();
        Scheduler.EmitSchedule();
        // The boundary MI itself is not scheduled; it becomes the end of
        // the next (earlier) region, and the anti-dep breaker observes it.
        Current = &MI;
        CurrentCount = Count;
        Scheduler.Observe(MI, CurrentCount);
      }
      // Re-seat I at MI (implicit MachineInstr& -> iterator conversion)
      // before continuing the walk.
      I = MI;
      if (MI.isBundle())
        Count -= MI.getBundleSize();
    }
    assert(Count == 0 && "Instruction count mismatch!");
    assert((MBB.begin() == Current || CurrentCount != 0) &&
           "Instruction count mismatch!");
    // Schedule the final region, which reaches the top of the block.
    Scheduler.enterRegion(&MBB, MBB.begin(), Current, CurrentCount);
    Scheduler.setEndIndex(CurrentCount);
    Scheduler.schedule();
    Scheduler.exitRegion();
    Scheduler.EmitSchedule();

    // Clean up register live-range state.
    Scheduler.finishBlock();

    // Update register kills
    Scheduler.fixupKills(MBB);
  }

  return true;
}
373*0b57cec5SDimitry Andric 
374*0b57cec5SDimitry Andric /// StartBlock - Initialize register live-range state for scheduling in
375*0b57cec5SDimitry Andric /// this block.
376*0b57cec5SDimitry Andric ///
startBlock(MachineBasicBlock * BB)377*0b57cec5SDimitry Andric void SchedulePostRATDList::startBlock(MachineBasicBlock *BB) {
378*0b57cec5SDimitry Andric   // Call the superclass.
379*0b57cec5SDimitry Andric   ScheduleDAGInstrs::startBlock(BB);
380*0b57cec5SDimitry Andric 
381*0b57cec5SDimitry Andric   // Reset the hazard recognizer and anti-dep breaker.
382*0b57cec5SDimitry Andric   HazardRec->Reset();
383*0b57cec5SDimitry Andric   if (AntiDepBreak)
384*0b57cec5SDimitry Andric     AntiDepBreak->StartBlock(BB);
385*0b57cec5SDimitry Andric }
386*0b57cec5SDimitry Andric 
387*0b57cec5SDimitry Andric /// Schedule - Schedule the instruction range using list scheduling.
388*0b57cec5SDimitry Andric ///
schedule()389*0b57cec5SDimitry Andric void SchedulePostRATDList::schedule() {
390*0b57cec5SDimitry Andric   // Build the scheduling graph.
391*0b57cec5SDimitry Andric   buildSchedGraph(AA);
392*0b57cec5SDimitry Andric 
393*0b57cec5SDimitry Andric   if (AntiDepBreak) {
394*0b57cec5SDimitry Andric     unsigned Broken =
395*0b57cec5SDimitry Andric       AntiDepBreak->BreakAntiDependencies(SUnits, RegionBegin, RegionEnd,
396*0b57cec5SDimitry Andric                                           EndIndex, DbgValues);
397*0b57cec5SDimitry Andric 
398*0b57cec5SDimitry Andric     if (Broken != 0) {
399*0b57cec5SDimitry Andric       // We made changes. Update the dependency graph.
400*0b57cec5SDimitry Andric       // Theoretically we could update the graph in place:
401*0b57cec5SDimitry Andric       // When a live range is changed to use a different register, remove
402*0b57cec5SDimitry Andric       // the def's anti-dependence *and* output-dependence edges due to
403*0b57cec5SDimitry Andric       // that register, and add new anti-dependence and output-dependence
404*0b57cec5SDimitry Andric       // edges based on the next live range of the register.
405*0b57cec5SDimitry Andric       ScheduleDAG::clearDAG();
406*0b57cec5SDimitry Andric       buildSchedGraph(AA);
407*0b57cec5SDimitry Andric 
408*0b57cec5SDimitry Andric       NumFixedAnti += Broken;
409*0b57cec5SDimitry Andric     }
410*0b57cec5SDimitry Andric   }
411*0b57cec5SDimitry Andric 
412*0b57cec5SDimitry Andric   postprocessDAG();
413*0b57cec5SDimitry Andric 
414*0b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "********** List Scheduling **********\n");
415*0b57cec5SDimitry Andric   LLVM_DEBUG(dump());
416*0b57cec5SDimitry Andric 
417*0b57cec5SDimitry Andric   AvailableQueue.initNodes(SUnits);
418*0b57cec5SDimitry Andric   ListScheduleTopDown();
419*0b57cec5SDimitry Andric   AvailableQueue.releaseState();
420*0b57cec5SDimitry Andric }
421*0b57cec5SDimitry Andric 
422*0b57cec5SDimitry Andric /// Observe - Update liveness information to account for the current
423*0b57cec5SDimitry Andric /// instruction, which will not be scheduled.
424*0b57cec5SDimitry Andric ///
Observe(MachineInstr & MI,unsigned Count)425*0b57cec5SDimitry Andric void SchedulePostRATDList::Observe(MachineInstr &MI, unsigned Count) {
426*0b57cec5SDimitry Andric   if (AntiDepBreak)
427*0b57cec5SDimitry Andric     AntiDepBreak->Observe(MI, Count, EndIndex);
428*0b57cec5SDimitry Andric }
429*0b57cec5SDimitry Andric 
430*0b57cec5SDimitry Andric /// FinishBlock - Clean up register live-range state.
431*0b57cec5SDimitry Andric ///
finishBlock()432*0b57cec5SDimitry Andric void SchedulePostRATDList::finishBlock() {
433*0b57cec5SDimitry Andric   if (AntiDepBreak)
434*0b57cec5SDimitry Andric     AntiDepBreak->FinishBlock();
435*0b57cec5SDimitry Andric 
436*0b57cec5SDimitry Andric   // Call the superclass.
437*0b57cec5SDimitry Andric   ScheduleDAGInstrs::finishBlock();
438*0b57cec5SDimitry Andric }
439*0b57cec5SDimitry Andric 
440*0b57cec5SDimitry Andric /// Apply each ScheduleDAGMutation step in order.
postprocessDAG()441*0b57cec5SDimitry Andric void SchedulePostRATDList::postprocessDAG() {
442*0b57cec5SDimitry Andric   for (auto &M : Mutations)
443*0b57cec5SDimitry Andric     M->apply(this);
444*0b57cec5SDimitry Andric }
445*0b57cec5SDimitry Andric 
446*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
447*0b57cec5SDimitry Andric //  Top-Down Scheduling
448*0b57cec5SDimitry Andric //===----------------------------------------------------------------------===//
449*0b57cec5SDimitry Andric 
/// ReleaseSucc - Decrement the NumPredsLeft count of a successor. Add it to
/// the PendingQueue if the count reaches zero.
void SchedulePostRATDList::ReleaseSucc(SUnit *SU, SDep *SuccEdge) {
  SUnit *SuccSU = SuccEdge->getSUnit();

  // Weak edges never gate readiness; just maintain their separate counter.
  if (SuccEdge->isWeak()) {
    --SuccSU->WeakPredsLeft;
    return;
  }
#ifndef NDEBUG
  // Releasing a node whose strong-predecessor count is already zero means
  // an edge was processed twice -- a DAG construction bug, so abort loudly.
  if (SuccSU->NumPredsLeft == 0) {
    dbgs() << "*** Scheduling failed! ***\n";
    dumpNode(*SuccSU);
    dbgs() << " has been released too many times!\n";
    llvm_unreachable(nullptr);
  }
#endif
  --SuccSU->NumPredsLeft;

  // Standard scheduler algorithms will recompute the depth of the successor
  // here as such:
  //   SuccSU->setDepthToAtLeast(SU->getDepth() + SuccEdge->getLatency());
  //
  // However, we lazily compute node depth instead. Note that
  // ScheduleNodeTopDown has already updated the depth of this node which causes
  // all descendents to be marked dirty. Setting the successor depth explicitly
  // here would cause depth to be recomputed for all its ancestors. If the
  // successor is not yet ready (because of a transitively redundant edge) then
  // this causes depth computation to be quadratic in the size of the DAG.

  // If all the node's predecessors are scheduled, this node is ready
  // to be scheduled. Ignore the special ExitSU node.
  if (SuccSU->NumPredsLeft == 0 && SuccSU != &ExitSU)
    PendingQueue.push_back(SuccSU);
}
485*0b57cec5SDimitry Andric 
486*0b57cec5SDimitry Andric /// ReleaseSuccessors - Call ReleaseSucc on each of SU's successors.
ReleaseSuccessors(SUnit * SU)487*0b57cec5SDimitry Andric void SchedulePostRATDList::ReleaseSuccessors(SUnit *SU) {
488*0b57cec5SDimitry Andric   for (SUnit::succ_iterator I = SU->Succs.begin(), E = SU->Succs.end();
489*0b57cec5SDimitry Andric        I != E; ++I) {
490*0b57cec5SDimitry Andric     ReleaseSucc(SU, &*I);
491*0b57cec5SDimitry Andric   }
492*0b57cec5SDimitry Andric }
493*0b57cec5SDimitry Andric 
494*0b57cec5SDimitry Andric /// ScheduleNodeTopDown - Add the node to the schedule. Decrement the pending
495*0b57cec5SDimitry Andric /// count of its successors. If a successor pending count is zero, add it to
496*0b57cec5SDimitry Andric /// the Available queue.
ScheduleNodeTopDown(SUnit * SU,unsigned CurCycle)497*0b57cec5SDimitry Andric void SchedulePostRATDList::ScheduleNodeTopDown(SUnit *SU, unsigned CurCycle) {
498*0b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "*** Scheduling [" << CurCycle << "]: ");
499*0b57cec5SDimitry Andric   LLVM_DEBUG(dumpNode(*SU));
500*0b57cec5SDimitry Andric 
501*0b57cec5SDimitry Andric   Sequence.push_back(SU);
502*0b57cec5SDimitry Andric   assert(CurCycle >= SU->getDepth() &&
503*0b57cec5SDimitry Andric          "Node scheduled above its depth!");
504*0b57cec5SDimitry Andric   SU->setDepthToAtLeast(CurCycle);
505*0b57cec5SDimitry Andric 
506*0b57cec5SDimitry Andric   ReleaseSuccessors(SU);
507*0b57cec5SDimitry Andric   SU->isScheduled = true;
508*0b57cec5SDimitry Andric   AvailableQueue.scheduledNode(SU);
509*0b57cec5SDimitry Andric }
510*0b57cec5SDimitry Andric 
511*0b57cec5SDimitry Andric /// emitNoop - Add a noop to the current instruction sequence.
emitNoop(unsigned CurCycle)512*0b57cec5SDimitry Andric void SchedulePostRATDList::emitNoop(unsigned CurCycle) {
513*0b57cec5SDimitry Andric   LLVM_DEBUG(dbgs() << "*** Emitting noop in cycle " << CurCycle << '\n');
514*0b57cec5SDimitry Andric   HazardRec->EmitNoop();
515*0b57cec5SDimitry Andric   Sequence.push_back(nullptr);   // NULL here means noop
516*0b57cec5SDimitry Andric   ++NumNoops;
517*0b57cec5SDimitry Andric }
518*0b57cec5SDimitry Andric 
519*0b57cec5SDimitry Andric /// ListScheduleTopDown - The main loop of list scheduling for top-down
520*0b57cec5SDimitry Andric /// schedulers.
ListScheduleTopDown()521*0b57cec5SDimitry Andric void SchedulePostRATDList::ListScheduleTopDown() {
522*0b57cec5SDimitry Andric   unsigned CurCycle = 0;
523*0b57cec5SDimitry Andric 
524*0b57cec5SDimitry Andric   // We're scheduling top-down but we're visiting the regions in
525*0b57cec5SDimitry Andric   // bottom-up order, so we don't know the hazards at the start of a
526*0b57cec5SDimitry Andric   // region. So assume no hazards (this should usually be ok as most
527*0b57cec5SDimitry Andric   // blocks are a single region).
528*0b57cec5SDimitry Andric   HazardRec->Reset();
529*0b57cec5SDimitry Andric 
530*0b57cec5SDimitry Andric   // Release any successors of the special Entry node.
531*0b57cec5SDimitry Andric   ReleaseSuccessors(&EntrySU);
532*0b57cec5SDimitry Andric 
533*0b57cec5SDimitry Andric   // Add all leaves to Available queue.
534*0b57cec5SDimitry Andric   for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
535*0b57cec5SDimitry Andric     // It is available if it has no predecessors.
536*0b57cec5SDimitry Andric     if (!SUnits[i].NumPredsLeft && !SUnits[i].isAvailable) {
537*0b57cec5SDimitry Andric       AvailableQueue.push(&SUnits[i]);
538*0b57cec5SDimitry Andric       SUnits[i].isAvailable = true;
539*0b57cec5SDimitry Andric     }
540*0b57cec5SDimitry Andric   }
541*0b57cec5SDimitry Andric 
542*0b57cec5SDimitry Andric   // In any cycle where we can't schedule any instructions, we must
543*0b57cec5SDimitry Andric   // stall or emit a noop, depending on the target.
544*0b57cec5SDimitry Andric   bool CycleHasInsts = false;
545*0b57cec5SDimitry Andric 
546*0b57cec5SDimitry Andric   // While Available queue is not empty, grab the node with the highest
547*0b57cec5SDimitry Andric   // priority. If it is not ready put it back.  Schedule the node.
548*0b57cec5SDimitry Andric   std::vector<SUnit*> NotReady;
549*0b57cec5SDimitry Andric   Sequence.reserve(SUnits.size());
550*0b57cec5SDimitry Andric   while (!AvailableQueue.empty() || !PendingQueue.empty()) {
551*0b57cec5SDimitry Andric     // Check to see if any of the pending instructions are ready to issue.  If
552*0b57cec5SDimitry Andric     // so, add them to the available queue.
553*0b57cec5SDimitry Andric     unsigned MinDepth = ~0u;
554*0b57cec5SDimitry Andric     for (unsigned i = 0, e = PendingQueue.size(); i != e; ++i) {
555*0b57cec5SDimitry Andric       if (PendingQueue[i]->getDepth() <= CurCycle) {
556*0b57cec5SDimitry Andric         AvailableQueue.push(PendingQueue[i]);
557*0b57cec5SDimitry Andric         PendingQueue[i]->isAvailable = true;
558*0b57cec5SDimitry Andric         PendingQueue[i] = PendingQueue.back();
559*0b57cec5SDimitry Andric         PendingQueue.pop_back();
560*0b57cec5SDimitry Andric         --i; --e;
561*0b57cec5SDimitry Andric       } else if (PendingQueue[i]->getDepth() < MinDepth)
562*0b57cec5SDimitry Andric         MinDepth = PendingQueue[i]->getDepth();
563*0b57cec5SDimitry Andric     }
564*0b57cec5SDimitry Andric 
565*0b57cec5SDimitry Andric     LLVM_DEBUG(dbgs() << "\n*** Examining Available\n";
566*0b57cec5SDimitry Andric                AvailableQueue.dump(this));
567*0b57cec5SDimitry Andric 
568*0b57cec5SDimitry Andric     SUnit *FoundSUnit = nullptr, *NotPreferredSUnit = nullptr;
569*0b57cec5SDimitry Andric     bool HasNoopHazards = false;
570*0b57cec5SDimitry Andric     while (!AvailableQueue.empty()) {
571*0b57cec5SDimitry Andric       SUnit *CurSUnit = AvailableQueue.pop();
572*0b57cec5SDimitry Andric 
573*0b57cec5SDimitry Andric       ScheduleHazardRecognizer::HazardType HT =
574*0b57cec5SDimitry Andric         HazardRec->getHazardType(CurSUnit, 0/*no stalls*/);
575*0b57cec5SDimitry Andric       if (HT == ScheduleHazardRecognizer::NoHazard) {
576*0b57cec5SDimitry Andric         if (HazardRec->ShouldPreferAnother(CurSUnit)) {
577*0b57cec5SDimitry Andric           if (!NotPreferredSUnit) {
578*0b57cec5SDimitry Andric             // If this is the first non-preferred node for this cycle, then
579*0b57cec5SDimitry Andric             // record it and continue searching for a preferred node. If this
580*0b57cec5SDimitry Andric             // is not the first non-preferred node, then treat it as though
581*0b57cec5SDimitry Andric             // there had been a hazard.
582*0b57cec5SDimitry Andric             NotPreferredSUnit = CurSUnit;
583*0b57cec5SDimitry Andric             continue;
584*0b57cec5SDimitry Andric           }
585*0b57cec5SDimitry Andric         } else {
586*0b57cec5SDimitry Andric           FoundSUnit = CurSUnit;
587*0b57cec5SDimitry Andric           break;
588*0b57cec5SDimitry Andric         }
589*0b57cec5SDimitry Andric       }
590*0b57cec5SDimitry Andric 
591*0b57cec5SDimitry Andric       // Remember if this is a noop hazard.
592*0b57cec5SDimitry Andric       HasNoopHazards |= HT == ScheduleHazardRecognizer::NoopHazard;
593*0b57cec5SDimitry Andric 
594*0b57cec5SDimitry Andric       NotReady.push_back(CurSUnit);
595*0b57cec5SDimitry Andric     }
596*0b57cec5SDimitry Andric 
597*0b57cec5SDimitry Andric     // If we have a non-preferred node, push it back onto the available list.
598*0b57cec5SDimitry Andric     // If we did not find a preferred node, then schedule this first
599*0b57cec5SDimitry Andric     // non-preferred node.
600*0b57cec5SDimitry Andric     if (NotPreferredSUnit) {
601*0b57cec5SDimitry Andric       if (!FoundSUnit) {
602*0b57cec5SDimitry Andric         LLVM_DEBUG(
603*0b57cec5SDimitry Andric             dbgs() << "*** Will schedule a non-preferred instruction...\n");
604*0b57cec5SDimitry Andric         FoundSUnit = NotPreferredSUnit;
605*0b57cec5SDimitry Andric       } else {
606*0b57cec5SDimitry Andric         AvailableQueue.push(NotPreferredSUnit);
607*0b57cec5SDimitry Andric       }
608*0b57cec5SDimitry Andric 
609*0b57cec5SDimitry Andric       NotPreferredSUnit = nullptr;
610*0b57cec5SDimitry Andric     }
611*0b57cec5SDimitry Andric 
612*0b57cec5SDimitry Andric     // Add the nodes that aren't ready back onto the available list.
613*0b57cec5SDimitry Andric     if (!NotReady.empty()) {
614*0b57cec5SDimitry Andric       AvailableQueue.push_all(NotReady);
615*0b57cec5SDimitry Andric       NotReady.clear();
616*0b57cec5SDimitry Andric     }
617*0b57cec5SDimitry Andric 
618*0b57cec5SDimitry Andric     // If we found a node to schedule...
619*0b57cec5SDimitry Andric     if (FoundSUnit) {
620*0b57cec5SDimitry Andric       // If we need to emit noops prior to this instruction, then do so.
621*0b57cec5SDimitry Andric       unsigned NumPreNoops = HazardRec->PreEmitNoops(FoundSUnit);
622*0b57cec5SDimitry Andric       for (unsigned i = 0; i != NumPreNoops; ++i)
623*0b57cec5SDimitry Andric         emitNoop(CurCycle);
624*0b57cec5SDimitry Andric 
625*0b57cec5SDimitry Andric       // ... schedule the node...
626*0b57cec5SDimitry Andric       ScheduleNodeTopDown(FoundSUnit, CurCycle);
627*0b57cec5SDimitry Andric       HazardRec->EmitInstruction(FoundSUnit);
628*0b57cec5SDimitry Andric       CycleHasInsts = true;
629*0b57cec5SDimitry Andric       if (HazardRec->atIssueLimit()) {
630*0b57cec5SDimitry Andric         LLVM_DEBUG(dbgs() << "*** Max instructions per cycle " << CurCycle
631*0b57cec5SDimitry Andric                           << '\n');
632*0b57cec5SDimitry Andric         HazardRec->AdvanceCycle();
633*0b57cec5SDimitry Andric         ++CurCycle;
634*0b57cec5SDimitry Andric         CycleHasInsts = false;
635*0b57cec5SDimitry Andric       }
636*0b57cec5SDimitry Andric     } else {
637*0b57cec5SDimitry Andric       if (CycleHasInsts) {
638*0b57cec5SDimitry Andric         LLVM_DEBUG(dbgs() << "*** Finished cycle " << CurCycle << '\n');
639*0b57cec5SDimitry Andric         HazardRec->AdvanceCycle();
640*0b57cec5SDimitry Andric       } else if (!HasNoopHazards) {
641*0b57cec5SDimitry Andric         // Otherwise, we have a pipeline stall, but no other problem,
642*0b57cec5SDimitry Andric         // just advance the current cycle and try again.
643*0b57cec5SDimitry Andric         LLVM_DEBUG(dbgs() << "*** Stall in cycle " << CurCycle << '\n');
644*0b57cec5SDimitry Andric         HazardRec->AdvanceCycle();
645*0b57cec5SDimitry Andric         ++NumStalls;
646*0b57cec5SDimitry Andric       } else {
647*0b57cec5SDimitry Andric         // Otherwise, we have no instructions to issue and we have instructions
648*0b57cec5SDimitry Andric         // that will fault if we don't do this right.  This is the case for
649*0b57cec5SDimitry Andric         // processors without pipeline interlocks and other cases.
650*0b57cec5SDimitry Andric         emitNoop(CurCycle);
651*0b57cec5SDimitry Andric       }
652*0b57cec5SDimitry Andric 
653*0b57cec5SDimitry Andric       ++CurCycle;
654*0b57cec5SDimitry Andric       CycleHasInsts = false;
655*0b57cec5SDimitry Andric     }
656*0b57cec5SDimitry Andric   }
657*0b57cec5SDimitry Andric 
658*0b57cec5SDimitry Andric #ifndef NDEBUG
659*0b57cec5SDimitry Andric   unsigned ScheduledNodes = VerifyScheduledDAG(/*isBottomUp=*/false);
660*0b57cec5SDimitry Andric   unsigned Noops = 0;
661*0b57cec5SDimitry Andric   for (unsigned i = 0, e = Sequence.size(); i != e; ++i)
662*0b57cec5SDimitry Andric     if (!Sequence[i])
663*0b57cec5SDimitry Andric       ++Noops;
664*0b57cec5SDimitry Andric   assert(Sequence.size() - Noops == ScheduledNodes &&
665*0b57cec5SDimitry Andric          "The number of nodes scheduled doesn't match the expected number!");
666*0b57cec5SDimitry Andric #endif // NDEBUG
667*0b57cec5SDimitry Andric }
668*0b57cec5SDimitry Andric 
669*0b57cec5SDimitry Andric // EmitSchedule - Emit the machine code in scheduled order.
EmitSchedule()670*0b57cec5SDimitry Andric void SchedulePostRATDList::EmitSchedule() {
671*0b57cec5SDimitry Andric   RegionBegin = RegionEnd;
672*0b57cec5SDimitry Andric 
673*0b57cec5SDimitry Andric   // If first instruction was a DBG_VALUE then put it back.
674*0b57cec5SDimitry Andric   if (FirstDbgValue)
675*0b57cec5SDimitry Andric     BB->splice(RegionEnd, BB, FirstDbgValue);
676*0b57cec5SDimitry Andric 
677*0b57cec5SDimitry Andric   // Then re-insert them according to the given schedule.
678*0b57cec5SDimitry Andric   for (unsigned i = 0, e = Sequence.size(); i != e; i++) {
679*0b57cec5SDimitry Andric     if (SUnit *SU = Sequence[i])
680*0b57cec5SDimitry Andric       BB->splice(RegionEnd, BB, SU->getInstr());
681*0b57cec5SDimitry Andric     else
682*0b57cec5SDimitry Andric       // Null SUnit* is a noop.
683*0b57cec5SDimitry Andric       TII->insertNoop(*BB, RegionEnd);
684*0b57cec5SDimitry Andric 
685*0b57cec5SDimitry Andric     // Update the Begin iterator, as the first instruction in the block
686*0b57cec5SDimitry Andric     // may have been scheduled later.
687*0b57cec5SDimitry Andric     if (i == 0)
688*0b57cec5SDimitry Andric       RegionBegin = std::prev(RegionEnd);
689*0b57cec5SDimitry Andric   }
690*0b57cec5SDimitry Andric 
691*0b57cec5SDimitry Andric   // Reinsert any remaining debug_values.
692*0b57cec5SDimitry Andric   for (std::vector<std::pair<MachineInstr *, MachineInstr *> >::iterator
693*0b57cec5SDimitry Andric          DI = DbgValues.end(), DE = DbgValues.begin(); DI != DE; --DI) {
694*0b57cec5SDimitry Andric     std::pair<MachineInstr *, MachineInstr *> P = *std::prev(DI);
695*0b57cec5SDimitry Andric     MachineInstr *DbgValue = P.first;
696*0b57cec5SDimitry Andric     MachineBasicBlock::iterator OrigPrivMI = P.second;
697*0b57cec5SDimitry Andric     BB->splice(++OrigPrivMI, BB, DbgValue);
698*0b57cec5SDimitry Andric   }
699*0b57cec5SDimitry Andric   DbgValues.clear();
700*0b57cec5SDimitry Andric   FirstDbgValue = nullptr;
701*0b57cec5SDimitry Andric }
702*0b57cec5SDimitry Andric