1 //===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // Tools for determining which instructions are within a statement and the
11 // nature of their operands.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "polly/Support/VirtualInstruction.h"
16 #include "polly/Support/SCEVValidator.h"
17 
18 using namespace polly;
19 using namespace llvm;
20 
21 VirtualUse VirtualUse ::create(Scop *S, const Use &U, LoopInfo *LI,
22                                bool Virtual) {
23   auto *UserBB = getUseBlock(U);
24   Instruction *UI = dyn_cast<Instruction>(U.getUser());
25   ScopStmt *UserStmt = nullptr;
26   if (PHINode *PHI = dyn_cast<PHINode>(UI))
27     UserStmt = S->getLastStmtFor(PHI->getIncomingBlock(U));
28   else
29     UserStmt = S->getStmtFor(UI);
30   auto *UserScope = LI->getLoopFor(UserBB);
31   return create(S, UserStmt, UserScope, U.get(), Virtual);
32 }
33 
/// Classify how @p Val is used from @p UserStmt (within loop @p UserScope) and
/// wrap the result in a VirtualUse.
///
/// The checks below form an ordered cascade: each category is only correct
/// under the assumption that all earlier categories did not match. Do not
/// reorder them.
VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
                              Value *Val, bool Virtual) {
  assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");

  // Basic blocks (e.g. branch targets) and constants/metadata need no
  // statement context.
  if (isa<BasicBlock>(Val))
    return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);

  if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
    return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);

  // Is the value synthesizable? If the user has been pruned
  // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
  // We assume synthesizable which practically should have the same effect.
  auto *SE = S->getSE();
  if (SE->isSCEVable(Val->getType())) {
    auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
    if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
      return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
  }

  // FIXME: Inconsistency between lookupInvariantEquivClass and
  // getRequiredInvariantLoads. Querying one of them should be enough.
  auto &RIL = S->getRequiredInvariantLoads();
  if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
    return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);

  // ReadOnly uses may have MemoryAccesses that we want to associate with the
  // use. This is why we look for a MemoryAccess here already.
  MemoryAccess *InputMA = nullptr;
  if (UserStmt && Virtual)
    InputMA = UserStmt->lookupValueReadOf(Val);

  // Uses are read-only if they have been defined before the SCoP, i.e., they
  // cannot be written to inside the SCoP. Arguments are defined before any
  // instructions, hence also before the SCoP. If the user has been pruned
  // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
  // neither an intra- nor an inter-use.
  if (!UserStmt || isa<Argument>(Val))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // At this point Val must be an instruction; anything defined outside the
  // SCoP region is read-only as well.
  auto Inst = cast<Instruction>(Val);
  if (!S->contains(Inst))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // A use is inter-statement if either it is defined in another statement, or
  // there is a MemoryAccess that reads its value that has been written by
  // another statement.
  if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
    return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);

  return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
}
86 
87 void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
88   OS << "User: [" << User->getBaseName() << "] ";
89   switch (Kind) {
90   case VirtualUse::Constant:
91     OS << "Constant Op:";
92     break;
93   case VirtualUse::Block:
94     OS << "BasicBlock Op:";
95     break;
96   case VirtualUse::Synthesizable:
97     OS << "Synthesizable Op:";
98     break;
99   case VirtualUse::Hoisted:
100     OS << "Hoisted load Op:";
101     break;
102   case VirtualUse::ReadOnly:
103     OS << "Read-Only Op:";
104     break;
105   case VirtualUse::Intra:
106     OS << "Intra Op:";
107     break;
108   case VirtualUse::Inter:
109     OS << "Inter Op:";
110     break;
111   }
112 
113   if (Val) {
114     OS << ' ';
115     if (Reproducible)
116       OS << '"' << Val->getName() << '"';
117     else
118       Val->print(OS, true);
119   }
120   if (ScevExpr) {
121     OS << ' ';
122     ScevExpr->print(OS);
123   }
124   if (InputMA && !Reproducible)
125     OS << ' ' << InputMA;
126 }
127 
128 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
129 LLVM_DUMP_METHOD void VirtualUse::dump() const {
130   print(errs(), false);
131   errs() << '\n';
132 }
133 #endif
134 
135 void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
136   if (!Stmt || !Inst) {
137     OS << "[null VirtualInstruction]";
138     return;
139   }
140 
141   OS << "[" << Stmt->getBaseName() << "]";
142   Inst->print(OS, !Reproducible);
143 }
144 
145 #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
146 LLVM_DUMP_METHOD void VirtualInstruction::dump() const {
147   print(errs(), false);
148   errs() << '\n';
149 }
150 #endif
151 
152 /// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
153 static bool isRoot(const Instruction *Inst) {
154   // The store is handled by its MemoryAccess. The load must be reached from the
155   // roots in order to be marked as used.
156   if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
157     return false;
158 
159   // Terminator instructions (in region statements) are required for control
160   // flow.
161   if (isa<TerminatorInst>(Inst))
162     return true;
163 
164   // Writes to memory must be honored.
165   if (Inst->mayWriteToMemory())
166     return true;
167 
168   return false;
169 }
170 
171 /// Return true for MemoryAccesses that cannot be removed because it represents
172 /// an llvm::Value that is used after the SCoP.
173 static bool isEscaping(MemoryAccess *MA) {
174   assert(MA->isOriginalValueKind());
175   Scop *S = MA->getStatement()->getParent();
176   return S->isEscaping(cast<Instruction>(MA->getAccessValue()));
177 }
178 
179 /// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
180 static void
181 addInstructionRoots(ScopStmt *Stmt,
182                     SmallVectorImpl<VirtualInstruction> &RootInsts) {
183   if (!Stmt->isBlockStmt()) {
184     // In region statements the terminator statement and all statements that
185     // are not in the entry block cannot be eliminated and consequently must
186     // be roots.
187     RootInsts.emplace_back(Stmt,
188                            Stmt->getRegion()->getEntry()->getTerminator());
189     for (BasicBlock *BB : Stmt->getRegion()->blocks())
190       if (Stmt->getRegion()->getEntry() != BB)
191         for (Instruction &Inst : *BB)
192           RootInsts.emplace_back(Stmt, &Inst);
193     return;
194   }
195 
196   for (Instruction *Inst : Stmt->getInstructions())
197     if (isRoot(Inst))
198       RootInsts.emplace_back(Stmt, Inst);
199 }
200 
201 /// Add non-removable memory accesses in @p Stmt to @p RootInsts.
202 ///
203 /// @param Local If true, all writes are assumed to escape. markAndSweep
204 /// algorithms can use this to be applicable to a single ScopStmt only without
205 /// the risk of removing definitions required by other statements.
206 ///              If false, only writes for SCoP-escaping values are roots.  This
207 ///              is global mode, where such writes must be marked by theirs uses
208 ///              in order to be reachable.
209 static void addAccessRoots(ScopStmt *Stmt,
210                            SmallVectorImpl<MemoryAccess *> &RootAccs,
211                            bool Local) {
212   for (auto *MA : *Stmt) {
213     if (!MA->isWrite())
214       continue;
215 
216     // Writes to arrays are always used.
217     if (MA->isLatestArrayKind())
218       RootAccs.push_back(MA);
219 
220     // Values are roots if they are escaping.
221     else if (MA->isLatestValueKind()) {
222       if (Local || isEscaping(MA))
223         RootAccs.push_back(MA);
224     }
225 
226     // Exit phis are, by definition, escaping.
227     else if (MA->isLatestExitPHIKind())
228       RootAccs.push_back(MA);
229 
230     // phi writes are only roots if we are not visiting the statement
231     // containing the PHINode.
232     else if (Local && MA->isLatestPHIKind())
233       RootAccs.push_back(MA);
234   }
235 }
236 
237 /// Determine all instruction and access roots.
238 static void addRoots(ScopStmt *Stmt,
239                      SmallVectorImpl<VirtualInstruction> &RootInsts,
240                      SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
241   addInstructionRoots(Stmt, RootInsts);
242   addAccessRoots(Stmt, RootAccs, Local);
243 }
244 
245 /// Mark accesses and instructions as used if they are reachable from a root,
246 /// walking the operand trees.
247 ///
248 /// @param S              The SCoP to walk.
249 /// @param LI             The LoopInfo Analysis.
250 /// @param RootInsts      List of root instructions.
251 /// @param RootAccs       List of root accesses.
252 /// @param UsesInsts[out] Receives all reachable instructions, including the
253 /// roots.
254 /// @param UsedAccs[out]  Receives all reachable accesses, including the roots.
255 /// @param OnlyLocal      If non-nullptr, restricts walking to a single
256 /// statement.
257 static void walkReachable(Scop *S, LoopInfo *LI,
258                           ArrayRef<VirtualInstruction> RootInsts,
259                           ArrayRef<MemoryAccess *> RootAccs,
260                           DenseSet<VirtualInstruction> &UsedInsts,
261                           DenseSet<MemoryAccess *> &UsedAccs,
262                           ScopStmt *OnlyLocal = nullptr) {
263   UsedInsts.clear();
264   UsedAccs.clear();
265 
266   SmallVector<VirtualInstruction, 32> WorklistInsts;
267   SmallVector<MemoryAccess *, 32> WorklistAccs;
268 
269   WorklistInsts.append(RootInsts.begin(), RootInsts.end());
270   WorklistAccs.append(RootAccs.begin(), RootAccs.end());
271 
272   auto AddToWorklist = [&](VirtualUse VUse) {
273     switch (VUse.getKind()) {
274     case VirtualUse::Block:
275     case VirtualUse::Constant:
276     case VirtualUse::Synthesizable:
277     case VirtualUse::Hoisted:
278       break;
279     case VirtualUse::ReadOnly:
280       // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
281       // enabled.
282       if (!VUse.getMemoryAccess())
283         break;
284       LLVM_FALLTHROUGH;
285     case VirtualUse::Inter:
286       assert(VUse.getMemoryAccess());
287       WorklistAccs.push_back(VUse.getMemoryAccess());
288       break;
289     case VirtualUse::Intra:
290       WorklistInsts.emplace_back(VUse.getUser(),
291                                  cast<Instruction>(VUse.getValue()));
292       break;
293     }
294   };
295 
296   while (true) {
297     // We have two worklists to process: Only when the MemoryAccess worklist is
298     // empty, we process the instruction worklist.
299 
300     while (!WorklistAccs.empty()) {
301       auto *Acc = WorklistAccs.pop_back_val();
302 
303       ScopStmt *Stmt = Acc->getStatement();
304       if (OnlyLocal && Stmt != OnlyLocal)
305         continue;
306 
307       auto Inserted = UsedAccs.insert(Acc);
308       if (!Inserted.second)
309         continue;
310 
311       if (Acc->isRead()) {
312         const ScopArrayInfo *SAI = Acc->getScopArrayInfo();
313 
314         if (Acc->isLatestValueKind()) {
315           MemoryAccess *DefAcc = S->getValueDef(SAI);
316 
317           // Accesses to read-only values do not have a definition.
318           if (DefAcc)
319             WorklistAccs.push_back(S->getValueDef(SAI));
320         }
321 
322         if (Acc->isLatestAnyPHIKind()) {
323           auto IncomingMAs = S->getPHIIncomings(SAI);
324           WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
325         }
326       }
327 
328       if (Acc->isWrite()) {
329         if (Acc->isOriginalValueKind() ||
330             (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
331           Loop *Scope = Stmt->getSurroundingLoop();
332           VirtualUse VUse =
333               VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
334           AddToWorklist(VUse);
335         }
336 
337         if (Acc->isOriginalAnyPHIKind()) {
338           for (auto Incoming : Acc->getIncoming()) {
339             VirtualUse VUse = VirtualUse::create(
340                 S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
341             AddToWorklist(VUse);
342           }
343         }
344 
345         if (Acc->isOriginalArrayKind())
346           WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
347       }
348     }
349 
350     // If both worklists are empty, stop walking.
351     if (WorklistInsts.empty())
352       break;
353 
354     VirtualInstruction VInst = WorklistInsts.pop_back_val();
355     ScopStmt *Stmt = VInst.getStmt();
356     Instruction *Inst = VInst.getInstruction();
357 
358     // Do not process statements other than the local.
359     if (OnlyLocal && Stmt != OnlyLocal)
360       continue;
361 
362     auto InsertResult = UsedInsts.insert(VInst);
363     if (!InsertResult.second)
364       continue;
365 
366     // Add all operands to the worklists.
367     PHINode *PHI = dyn_cast<PHINode>(Inst);
368     if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
369       if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
370         WorklistAccs.push_back(PHIRead);
371     } else {
372       for (VirtualUse VUse : VInst.operands())
373         AddToWorklist(VUse);
374     }
375 
376     // If there is an array access, also add its MemoryAccesses to the worklist.
377     const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
378     if (!Accs)
379       continue;
380 
381     for (MemoryAccess *Acc : *Accs)
382       WorklistAccs.push_back(Acc);
383   }
384 }
385 
386 void polly::markReachable(Scop *S, LoopInfo *LI,
387                           DenseSet<VirtualInstruction> &UsedInsts,
388                           DenseSet<MemoryAccess *> &UsedAccs,
389                           ScopStmt *OnlyLocal) {
390   SmallVector<VirtualInstruction, 32> RootInsts;
391   SmallVector<MemoryAccess *, 32> RootAccs;
392 
393   if (OnlyLocal) {
394     addRoots(OnlyLocal, RootInsts, RootAccs, true);
395   } else {
396     for (auto &Stmt : *S)
397       addRoots(&Stmt, RootInsts, RootAccs, false);
398   }
399 
400   walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);
401 }
402