//===------ VirtualInstruction.cpp ------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Tools for determining which instructions are within a statement and the
// nature of their operands.
//
//===----------------------------------------------------------------------===//

#include "polly/Support/VirtualInstruction.h"
#include "polly/Support/SCEVValidator.h"

using namespace polly;
using namespace llvm;

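// Compute the VirtualUse of an llvm::Use. Uses by PHI nodes need special
// handling because the used value is, in general, written by another
// statement; all other uses are delegated to the Value-based overload below.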
VirtualUse VirtualUse::create(Scop *S, const Use &U, LoopInfo *LI,
                              bool Virtual) {
  auto *UserBB = getUseBlock(U);
  Loop *UserScope = LI->getLoopFor(UserBB);
  Instruction *UI = dyn_cast<Instruction>(U.getUser());
  ScopStmt *UserStmt = S->getStmtFor(UI);

  // Uses by PHI nodes always read values written by other statements, unless
  // the PHI is inside a region statement.
  if (PHINode *PHI = dyn_cast<PHINode>(UI)) {
    // Handle PHIs in the SCoP's exit block.
    if (S->getRegion().getExit() == PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Inter, nullptr, nullptr);

    if (UserStmt->getEntryBlock() != PHI->getParent())
      return VirtualUse(UserStmt, U.get(), Intra, nullptr, nullptr);

    // The MemoryAccess is expected to be set if @p Virtual is true.
    MemoryAccess *IncomingMA = nullptr;
    if (Virtual) {
      if (const ScopArrayInfo *SAI =
              S->getScopArrayInfoOrNull(PHI, MemoryKind::PHI)) {
        IncomingMA = S->getPHIRead(SAI);
        assert(IncomingMA->getStatement() == UserStmt);
      }
    }

    return VirtualUse(UserStmt, U.get(), Inter, nullptr, IncomingMA);
  }

  return create(S, UserStmt, UserScope, U.get(), Virtual);
}

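// Classify how @p Val is used within @p UserStmt. The checks are ordered:
// non-SSA operands (basic blocks, constants, metadata) first, then
// synthesizable values, hoisted invariant loads, read-only values defined
// outside the SCoP, and finally intra- vs. inter-statement uses.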
VirtualUse VirtualUse::create(Scop *S, ScopStmt *UserStmt, Loop *UserScope,
                              Value *Val, bool Virtual) {
  assert(!isa<StoreInst>(Val) && "a StoreInst cannot be used");

  if (isa<BasicBlock>(Val))
    return VirtualUse(UserStmt, Val, Block, nullptr, nullptr);

  if (isa<llvm::Constant>(Val) || isa<MetadataAsValue>(Val))
    return VirtualUse(UserStmt, Val, Constant, nullptr, nullptr);

  // Is the value synthesizable? If the user has been pruned
  // (UserStmt == nullptr), it is either not used anywhere or is synthesizable.
  // We assume it is synthesizable, which in practice has the same effect.
  auto *SE = S->getSE();
  if (SE->isSCEVable(Val->getType())) {
    auto *ScevExpr = SE->getSCEVAtScope(Val, UserScope);
    if (!UserStmt || canSynthesize(Val, *UserStmt->getParent(), SE, UserScope))
      return VirtualUse(UserStmt, Val, Synthesizable, ScevExpr, nullptr);
  }

  // FIXME: Inconsistency between lookupInvariantEquivClass and
  // getRequiredInvariantLoads. Querying one of them should be enough.
  auto &RIL = S->getRequiredInvariantLoads();
  if (S->lookupInvariantEquivClass(Val) || RIL.count(dyn_cast<LoadInst>(Val)))
    return VirtualUse(UserStmt, Val, Hoisted, nullptr, nullptr);

  // Read-only uses may have MemoryAccesses that we want to associate with the
  // use, which is why we already look for a MemoryAccess here.
  MemoryAccess *InputMA = nullptr;
  if (UserStmt && Virtual)
    InputMA = UserStmt->lookupValueReadOf(Val);

  // Uses are read-only if they have been defined before the SCoP, i.e., they
  // cannot be written to inside the SCoP. Arguments are defined before any
  // instructions, hence also before the SCoP. If the user has been pruned
  // (UserStmt == nullptr) and is not SCEVable, assume it is read-only as it is
  // neither an intra- nor an inter-use.
  if (!UserStmt || isa<Argument>(Val))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  auto Inst = cast<Instruction>(Val);
  if (!S->contains(Inst))
    return VirtualUse(UserStmt, Val, ReadOnly, nullptr, InputMA);

  // A use is inter-statement if either it is defined in another statement, or
  // there is a MemoryAccess that reads its value after it has been written by
  // another statement.
  if (InputMA || (!Virtual && UserStmt != S->getStmtFor(Inst)))
    return VirtualUse(UserStmt, Val, Inter, nullptr, InputMA);

  return VirtualUse(UserStmt, Val, Intra, nullptr, nullptr);
}

void VirtualUse::print(raw_ostream &OS, bool Reproducible) const {
  OS << "User: [" << User->getBaseName() << "] ";
  switch (Kind) {
  case VirtualUse::Constant:
    OS << "Constant Op:";
    break;
  case VirtualUse::Block:
    OS << "BasicBlock Op:";
    break;
  case VirtualUse::Synthesizable:
    OS << "Synthesizable Op:";
    break;
  case VirtualUse::Hoisted:
    OS << "Hoisted load Op:";
    break;
  case VirtualUse::ReadOnly:
    OS << "Read-Only Op:";
    break;
  case VirtualUse::Intra:
    OS << "Intra Op:";
    break;
  case VirtualUse::Inter:
    OS << "Inter Op:";
    break;
  }

  if (Val) {
    OS << ' ';
    if (Reproducible)
      OS << '"' << Val->getName() << '"';
    else
      Val->print(OS, true);
  }
  if (ScevExpr) {
    OS << ' ';
    ScevExpr->print(OS);
  }
  if (InputMA && !Reproducible)
    OS << ' ' << InputMA;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualUse::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

void VirtualInstruction::print(raw_ostream &OS, bool Reproducible) const {
  if (!Stmt || !Inst) {
    OS << "[null VirtualInstruction]";
    return;
  }

  OS << "[" << Stmt->getBaseName() << "]";
  Inst->print(OS, !Reproducible);
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void VirtualInstruction::dump() const {
  print(errs(), false);
  errs() << '\n';
}
#endif

/// Return true if @p Inst cannot be removed, even if it is nowhere referenced.
static bool isRoot(const Instruction *Inst) {
  // The store is handled by its MemoryAccess. The load must be reached from
  // the roots in order to be marked as used.
  if (isa<LoadInst>(Inst) || isa<StoreInst>(Inst))
    return false;

  // Terminator instructions (in region statements) are required for control
  // flow.
  if (isa<TerminatorInst>(Inst))
    return true;

  // Writes to memory must be honored.
  if (Inst->mayWriteToMemory())
    return true;

  return false;
}

/// Return true for a MemoryAccess that cannot be removed because it represents
/// an llvm::Value that is used after the SCoP.
static bool isEscaping(MemoryAccess *MA) {
  assert(MA->isOriginalValueKind());
  Scop *S = MA->getStatement()->getParent();
  return S->isEscaping(cast<Instruction>(MA->getAccessValue()));
}

/// Add non-removable virtual instructions in @p Stmt to @p RootInsts.
static void
addInstructionRoots(ScopStmt *Stmt,
                    SmallVectorImpl<VirtualInstruction> &RootInsts) {
  if (!Stmt->isBlockStmt()) {
    // In region statements, the terminator instruction and all instructions
    // that are not in the entry block cannot be eliminated and consequently
    // must be roots.
    RootInsts.emplace_back(Stmt,
                           Stmt->getRegion()->getEntry()->getTerminator());
    for (BasicBlock *BB : Stmt->getRegion()->blocks())
      if (Stmt->getRegion()->getEntry() != BB)
        for (Instruction &Inst : *BB)
          RootInsts.emplace_back(Stmt, &Inst);
    return;
  }

  for (Instruction *Inst : Stmt->getInstructions())
    if (isRoot(Inst))
      RootInsts.emplace_back(Stmt, Inst);
}

/// Add non-removable memory accesses in @p Stmt to @p RootAccs.
///
/// @param Local If true, all writes are assumed to escape. Mark-and-sweep
///              algorithms can use this to process a single ScopStmt without
///              the risk of removing definitions required by other statements.
///              If false, only writes of SCoP-escaping values are roots. This
///              is global mode, where such writes must be reached from their
///              uses in order to be marked.
static void addAccessRoots(ScopStmt *Stmt,
                           SmallVectorImpl<MemoryAccess *> &RootAccs,
                           bool Local) {
  for (auto *MA : *Stmt) {
    if (!MA->isWrite())
      continue;

    // Writes to arrays are always used.
    if (MA->isLatestArrayKind())
      RootAccs.push_back(MA);

    // Values are roots if they are escaping.
    else if (MA->isLatestValueKind()) {
      if (Local || isEscaping(MA))
        RootAccs.push_back(MA);
    }

    // Exit PHIs are, by definition, escaping.
    else if (MA->isLatestExitPHIKind())
      RootAccs.push_back(MA);

    // PHI writes are only roots if we are not visiting the statement
    // containing the PHINode.
    else if (Local && MA->isLatestPHIKind())
      RootAccs.push_back(MA);
  }
}

/// Determine all instruction and access roots.
static void addRoots(ScopStmt *Stmt,
                     SmallVectorImpl<VirtualInstruction> &RootInsts,
                     SmallVectorImpl<MemoryAccess *> &RootAccs, bool Local) {
  addInstructionRoots(Stmt, RootInsts);
  addAccessRoots(Stmt, RootAccs, Local);
}

/// Mark accesses and instructions as used if they are reachable from a root,
/// walking the operand trees.
///
/// @param S              The SCoP to walk.
/// @param LI             The LoopInfo analysis.
/// @param RootInsts      List of root instructions.
/// @param RootAccs       List of root accesses.
/// @param UsedInsts[out] Receives all reachable instructions, including the
///                       roots.
/// @param UsedAccs[out]  Receives all reachable accesses, including the roots.
/// @param OnlyLocal      If non-nullptr, restricts the walk to a single
///                       statement.
static void walkReachable(Scop *S, LoopInfo *LI,
                          ArrayRef<VirtualInstruction> RootInsts,
                          ArrayRef<MemoryAccess *> RootAccs,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal = nullptr) {
  UsedInsts.clear();
  UsedAccs.clear();

  SmallVector<VirtualInstruction, 32> WorklistInsts;
  SmallVector<MemoryAccess *, 32> WorklistAccs;

  WorklistInsts.append(RootInsts.begin(), RootInsts.end());
  WorklistAccs.append(RootAccs.begin(), RootAccs.end());

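  // Add the operand a VirtualUse refers to, if any, to the corresponding
  // worklist: inter-statement and (modeled) read-only uses enqueue the
  // MemoryAccess providing the value, intra-statement uses enqueue the
  // defining instruction, and all other kinds need no further processing.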
  auto AddToWorklist = [&](VirtualUse VUse) {
    switch (VUse.getKind()) {
    case VirtualUse::Block:
    case VirtualUse::Constant:
    case VirtualUse::Synthesizable:
    case VirtualUse::Hoisted:
      break;
    case VirtualUse::ReadOnly:
      // Read-only scalars only have MemoryAccesses if ModelReadOnlyScalars is
      // enabled.
      if (!VUse.getMemoryAccess())
        break;
      LLVM_FALLTHROUGH;
    case VirtualUse::Inter:
      assert(VUse.getMemoryAccess());
      WorklistAccs.push_back(VUse.getMemoryAccess());
      break;
    case VirtualUse::Intra:
      WorklistInsts.emplace_back(VUse.getUser(),
                                 cast<Instruction>(VUse.getValue()));
      break;
    }
  };

  while (true) {
    // We have two worklists to process: the instruction worklist is only
    // processed when the MemoryAccess worklist is empty.

    while (!WorklistAccs.empty()) {
      auto *Acc = WorklistAccs.pop_back_val();

      ScopStmt *Stmt = Acc->getStatement();
      if (OnlyLocal && Stmt != OnlyLocal)
        continue;

      auto Inserted = UsedAccs.insert(Acc);
      if (!Inserted.second)
        continue;

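      // For reads, enqueue the writes that may define the read value: the
      // value definition for MemoryKind::Value accesses, or all incoming
      // writes for PHI-kind accesses.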
      if (Acc->isRead()) {
        const ScopArrayInfo *SAI = Acc->getScopArrayInfo();

        if (Acc->isLatestValueKind()) {
          MemoryAccess *DefAcc = S->getValueDef(SAI);

          // Accesses to read-only values do not have a definition.
          if (DefAcc)
            WorklistAccs.push_back(DefAcc);
        }

        if (Acc->isLatestAnyPHIKind()) {
          auto IncomingMAs = S->getPHIIncomings(SAI);
          WorklistAccs.append(IncomingMAs.begin(), IncomingMAs.end());
        }
      }

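      // For writes, the operands of the written value are used: the stored or
      // defined value itself, the incoming values of a written PHI, and, for
      // array writes, the instruction performing the access.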
      if (Acc->isWrite()) {
        if (Acc->isOriginalValueKind() ||
            (Acc->isOriginalArrayKind() && Acc->getAccessValue())) {
          Loop *Scope = Stmt->getSurroundingLoop();
          VirtualUse VUse =
              VirtualUse::create(S, Stmt, Scope, Acc->getAccessValue(), true);
          AddToWorklist(VUse);
        }

        if (Acc->isOriginalAnyPHIKind()) {
          for (auto Incoming : Acc->getIncoming()) {
            VirtualUse VUse = VirtualUse::create(
                S, Stmt, LI->getLoopFor(Incoming.first), Incoming.second, true);
            AddToWorklist(VUse);
          }
        }

        if (Acc->isOriginalArrayKind())
          WorklistInsts.emplace_back(Stmt, Acc->getAccessInstruction());
      }
    }

    // If both worklists are empty, stop walking.
    if (WorklistInsts.empty())
      break;

    VirtualInstruction VInst = WorklistInsts.pop_back_val();
    ScopStmt *Stmt = VInst.getStmt();
    Instruction *Inst = VInst.getInstruction();

    // Do not process instructions from statements other than the local one.
    if (OnlyLocal && Stmt != OnlyLocal)
      continue;

    auto InsertResult = UsedInsts.insert(VInst);
    if (!InsertResult.second)
      continue;

    // Add all operands to the worklists.
    PHINode *PHI = dyn_cast<PHINode>(Inst);
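    // PHIs in the statement's entry block get their value through the
    // statement's MemoryKind::PHI read access; their incoming values are
    // written by the incoming statements, not used as direct operands here.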
    if (PHI && PHI->getParent() == Stmt->getEntryBlock()) {
      if (MemoryAccess *PHIRead = Stmt->lookupPHIReadOf(PHI))
        WorklistAccs.push_back(PHIRead);
    } else {
      for (VirtualUse VUse : VInst.operands())
        AddToWorklist(VUse);
    }

    // If there is an array access, also add its MemoryAccesses to the
    // worklist.
    const MemoryAccessList *Accs = Stmt->lookupArrayAccessesFor(Inst);
    if (!Accs)
      continue;

    for (MemoryAccess *Acc : *Accs)
      WorklistAccs.push_back(Acc);
  }
}

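// A minimal usage sketch (illustrative only; the Scop S and LoopInfo LI are
// assumed to be available from the surrounding pass):
//
//   DenseSet<VirtualInstruction> UsedInsts;
//   DenseSet<MemoryAccess *> UsedAccs;
//   markReachable(&S, &LI, UsedInsts, UsedAccs, /*OnlyLocal=*/nullptr);
//   // Anything not in UsedInsts/UsedAccs is unreachable from the roots and
//   // can be treated as dead within the SCoP.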
void polly::markReachable(Scop *S, LoopInfo *LI,
                          DenseSet<VirtualInstruction> &UsedInsts,
                          DenseSet<MemoryAccess *> &UsedAccs,
                          ScopStmt *OnlyLocal) {
  SmallVector<VirtualInstruction, 32> RootInsts;
  SmallVector<MemoryAccess *, 32> RootAccs;

  if (OnlyLocal) {
    addRoots(OnlyLocal, RootInsts, RootAccs, true);
  } else {
    for (auto &Stmt : *S)
      addRoots(&Stmt, RootInsts, RootAccs, false);
  }

  walkReachable(S, LI, RootInsts, RootAccs, UsedInsts, UsedAccs, OnlyLocal);
}
422