1 //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains routines that help determine which pointers are captured.
10 // A pointer value is captured if the function makes a copy of any part of the
11 // pointer that outlives the call.  Not being captured means, more or less, that
12 // the pointer is only dereferenced and not stored in a global.  Returning part
13 // of the pointer as the function return value may or may not count as capturing
14 // the pointer, depending on the context.
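//
// For illustration (example IR, not taken from this file): in the function
// below, %p is captured by the store into the global, while the load from %p
// is a plain dereference and does not capture it.
//
//   @g = global i8* null
//
//   define void @f(i8* %p) {
//     store i8* %p, i8** @g    ; captures %p
//     %v = load i8, i8* %p     ; dereference only, does not capture %p
//     ret void
//   }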
15 //
16 //===----------------------------------------------------------------------===//
17 
18 #include "llvm/Analysis/CaptureTracking.h"
19 #include "llvm/ADT/SmallSet.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/Analysis/AliasAnalysis.h"
22 #include "llvm/Analysis/CFG.h"
23 #include "llvm/Analysis/ValueTracking.h"
24 #include "llvm/IR/Constants.h"
25 #include "llvm/IR/Dominators.h"
26 #include "llvm/IR/Instructions.h"
27 #include "llvm/IR/IntrinsicInst.h"
28 
29 using namespace llvm;
30 
31 /// The default value for MaxUsesToExplore argument. It's relatively small to
32 /// keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
33 /// where the results can't be cached.
34 /// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or drop the cap entirely.
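/// The cap can also be adjusted from the command line via the hidden
/// -capture-tracking-max-uses-to-explore=<n> option defined below.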
37 static cl::opt<unsigned>
38 DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
39                         cl::desc("Maximal number of uses to explore."),
40                         cl::init(20));
41 
42 unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
43   return DefaultMaxUsesToExplore;
44 }
45 
46 CaptureTracker::~CaptureTracker() {}
47 
48 bool CaptureTracker::shouldExplore(const Use *U) { return true; }
49 
50 bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
51   // An inbounds GEP can either be a valid pointer (pointing into
52   // or to the end of an allocation), or be null in the default
53   // address space. So for an inbounds GEP there is no way to let
54   // the pointer escape using clever GEP hacking because doing so
55   // would make the pointer point outside of the allocated object
56   // and thus make the GEP result a poison value. Similarly, other
57   // dereferenceable pointers cannot be manipulated without producing
58   // poison.
59   if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
60     if (GEP->isInBounds())
61       return true;
62   bool CanBeNull;
63   return O->getPointerDereferenceableBytes(DL, CanBeNull);
64 }
65 
66 namespace {
67   struct SimpleCaptureTracker : public CaptureTracker {
68     explicit SimpleCaptureTracker(bool ReturnCaptures)
69       : ReturnCaptures(ReturnCaptures), Captured(false) {}
70 
71     void tooManyUses() override { Captured = true; }
72 
73     bool captured(const Use *U) override {
74       if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
75         return false;
76 
77       Captured = true;
78       return true;
79     }
80 
81     bool ReturnCaptures;
82 
83     bool Captured;
84   };
85 
86   /// Only find pointer captures which happen before the given instruction. Uses
87   /// the dominator tree to determine whether one instruction is before another.
  /// Only supports the case where the Value is defined in the same basic block
  /// as the given instruction and the use.
90   struct CapturesBefore : public CaptureTracker {
91 
    CapturesBefore(bool ReturnCaptures, const Instruction *I,
                   const DominatorTree *DT, bool IncludeI)
      : BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}
96 
97     void tooManyUses() override { Captured = true; }
98 
99     bool isSafeToPrune(Instruction *I) {
100       BasicBlock *BB = I->getParent();
      // We explore this use only if it can reach "BeforeHere".
      // If the use is not reachable from the entry block, there is no need to
      // explore it.
103       if (BeforeHere != I && !DT->isReachableFromEntry(BB))
104         return true;
105 
106       // Compute the case where both instructions are inside the same basic
107       // block.
108       if (BB == BeforeHere->getParent()) {
109         // 'I' dominates 'BeforeHere' => not safe to prune.
110         //
111         // The value defined by an invoke dominates an instruction only
112         // if it dominates every instruction in UseBB. A PHI is dominated only
113         // if the instruction dominates every possible use in the UseBB. Since
114         // UseBB == BB, avoid pruning.
115         if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
116           return false;
117         if (!BeforeHere->comesBefore(I))
118           return false;
119 
        // 'BeforeHere' comes before 'I'; it's safe to prune if we also
        // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
        // via its successors, i.e., prune if:
        //
        //  (1) BB is the entry block or has no successors.
        //  (2) There's no path coming back through BB's successors.
126         if (BB == &BB->getParent()->getEntryBlock() ||
127             !BB->getTerminator()->getNumSuccessors())
128           return true;
129 
130         SmallVector<BasicBlock*, 32> Worklist;
131         Worklist.append(succ_begin(BB), succ_end(BB));
132         return !isPotentiallyReachableFromMany(Worklist, BB, nullptr, DT);
133       }
134 
      // If the value is defined in the same basic block as the use and
      // BeforeHere, there is no need to explore the use if BeforeHere
      // dominates the use. Check whether there is a path from I to BeforeHere.
138       if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
139           !isPotentiallyReachable(I, BeforeHere, nullptr, DT))
140         return true;
141 
142       return false;
143     }
144 
145     bool shouldExplore(const Use *U) override {
146       Instruction *I = cast<Instruction>(U->getUser());
147 
148       if (BeforeHere == I && !IncludeI)
149         return false;
150 
151       if (isSafeToPrune(I))
152         return false;
153 
154       return true;
155     }
156 
157     bool captured(const Use *U) override {
158       if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
159         return false;
160 
161       if (!shouldExplore(U))
162         return false;
163 
164       Captured = true;
165       return true;
166     }
167 
168     const Instruction *BeforeHere;
169     const DominatorTree *DT;
170 
171     bool ReturnCaptures;
172     bool IncludeI;
173 
174     bool Captured;
175   };
176 }
177 
178 /// PointerMayBeCaptured - Return true if this pointer value may be captured
179 /// by the enclosing function (which is required to exist).  This routine can
180 /// be expensive, so consider caching the results.  The boolean ReturnCaptures
181 /// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
183 /// storing the value (or part of it) into memory anywhere automatically
184 /// counts as capturing it or not.
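///
/// A typical use (illustrative only; AI stands for whatever pointer value the
/// caller is analyzing):
///
/// \code
///   bool Escapes = PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
///                                       /*StoreCaptures=*/true);
///   // If !Escapes, no copy of AI outlives the call.
/// \endcode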
185 bool llvm::PointerMayBeCaptured(const Value *V,
186                                 bool ReturnCaptures, bool StoreCaptures,
187                                 unsigned MaxUsesToExplore) {
188   assert(!isa<GlobalValue>(V) &&
189          "It doesn't make sense to ask whether a global is captured.");
190 
  // TODO: If StoreCaptures is not true, we could do fancier analysis
192   // to determine whether this store is not actually an escape point.
193   // In that case, BasicAliasAnalysis should be updated as well to
194   // take advantage of this.
195   (void)StoreCaptures;
196 
197   SimpleCaptureTracker SCT(ReturnCaptures);
198   PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
199   return SCT.Captured;
200 }
201 
202 /// PointerMayBeCapturedBefore - Return true if this pointer value may be
203 /// captured by the enclosing function (which is required to exist). If a
204 /// DominatorTree is provided, only captures which happen before the given
205 /// instruction are considered. This routine can be expensive, so consider
206 /// caching the results.  The boolean ReturnCaptures specifies whether
207 /// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
209 /// (or part of it) into memory anywhere automatically counts as capturing it
210 /// or not.
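///
/// Illustrative call (CtxI and DT stand for whatever context instruction and
/// dominator tree the caller has available):
///
/// \code
///   bool EscapesBeforeCtxI = PointerMayBeCapturedBefore(
///       V, /*ReturnCaptures=*/true, /*StoreCaptures=*/true, CtxI, &DT,
///       /*IncludeI=*/false);
/// \endcode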
211 bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
212                                       bool StoreCaptures, const Instruction *I,
213                                       const DominatorTree *DT, bool IncludeI,
214                                       unsigned MaxUsesToExplore) {
215   assert(!isa<GlobalValue>(V) &&
216          "It doesn't make sense to ask whether a global is captured.");
217 
218   if (!DT)
219     return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
220                                 MaxUsesToExplore);
221 
222   // TODO: See comment in PointerMayBeCaptured regarding what could be done
223   // with StoreCaptures.
224 
225   CapturesBefore CB(ReturnCaptures, I, DT, IncludeI);
226   PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
227   return CB.Captured;
228 }
229 
230 void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
231                                 unsigned MaxUsesToExplore) {
232   assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
233   if (MaxUsesToExplore == 0)
234     MaxUsesToExplore = DefaultMaxUsesToExplore;
235 
236   SmallVector<const Use *, 20> Worklist;
237   Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
238   SmallSet<const Use *, 20> Visited;
239 
240   auto AddUses = [&](const Value *V) {
241     unsigned Count = 0;
242     for (const Use &U : V->uses()) {
243       // If there are lots of uses, conservatively say that the value
244       // is captured to avoid taking too much compile time.
245       if (Count++ >= MaxUsesToExplore)
246         return Tracker->tooManyUses();
247       if (!Visited.insert(&U).second)
248         continue;
249       if (!Tracker->shouldExplore(&U))
250         continue;
251       Worklist.push_back(&U);
252     }
253   };
254   AddUses(V);
255 
256   while (!Worklist.empty()) {
257     const Use *U = Worklist.pop_back_val();
258     Instruction *I = cast<Instruction>(U->getUser());
259     V = U->get();
260 
261     switch (I->getOpcode()) {
262     case Instruction::Call:
263     case Instruction::Invoke: {
264       auto *Call = cast<CallBase>(I);
265       // Not captured if the callee is readonly, doesn't return a copy through
266       // its return value and doesn't unwind (a readonly function can leak bits
267       // by throwing an exception or not depending on the input value).
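      //
      // For example (illustrative IR), a call such as
      //   declare void @observe(i8*) readonly nounwind
      //   call void @observe(i8* %p)
      // has no way to leak any bits of %p, so it is not treated as a capture.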
268       if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
269           Call->getType()->isVoidTy())
270         break;
271 
      // The pointer is not captured if the returned pointer is not captured.
273       // NOTE: CaptureTracking users should not assume that only functions
274       // marked with nocapture do not capture. This means that places like
275       // GetUnderlyingObject in ValueTracking or DecomposeGEPExpression
276       // in BasicAA also need to know about this property.
277       if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call,
278                                                                       true)) {
279         AddUses(Call);
280         break;
281       }
282 
      // Volatile operations effectively capture the memory location that they
      // load from and store to.
285       if (auto *MI = dyn_cast<MemIntrinsic>(Call))
286         if (MI->isVolatile())
287           if (Tracker->captured(U))
288             return;
289 
290       // Not captured if only passed via 'nocapture' arguments.  Note that
291       // calling a function pointer does not in itself cause the pointer to
292       // be captured.  This is a subtle point considering that (for example)
293       // the callee might return its own address.  It is analogous to saying
294       // that loading a value from a pointer does not cause the pointer to be
295       // captured, even though the loaded value might be the pointer itself
296       // (think of self-referential objects).
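      //
      // For example (illustrative IR), in
      //   declare void @takes(i8* nocapture)
      //   call void @takes(i8* %p)
      // the only use of %p is a 'nocapture' argument, so the call does not
      // capture %p.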
297       for (auto IdxOpPair : enumerate(Call->data_ops())) {
298         int Idx = IdxOpPair.index();
299         Value *A = IdxOpPair.value();
300         if (A == V && !Call->doesNotCapture(Idx))
301           // The parameter is not marked 'nocapture' - captured.
302           if (Tracker->captured(U))
303             return;
304       }
305       break;
306     }
307     case Instruction::Load:
308       // Volatile loads make the address observable.
309       if (cast<LoadInst>(I)->isVolatile())
310         if (Tracker->captured(U))
311           return;
312       break;
313     case Instruction::VAArg:
314       // "va-arg" from a pointer does not cause it to be captured.
315       break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
319       if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
320         if (Tracker->captured(U))
321           return;
322       break;
323     case Instruction::AtomicRMW: {
324       // atomicrmw conceptually includes both a load and store from
325       // the same location.
326       // As with a store, the location being accessed is not captured,
327       // but the value being stored is.
328       // Volatile stores make the address observable.
329       auto *ARMWI = cast<AtomicRMWInst>(I);
330       if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
331         if (Tracker->captured(U))
332           return;
333       break;
334     }
335     case Instruction::AtomicCmpXchg: {
336       // cmpxchg conceptually includes both a load and store from
337       // the same location.
338       // As with a store, the location being accessed is not captured,
339       // but the value being stored is.
340       // Volatile stores make the address observable.
341       auto *ACXI = cast<AtomicCmpXchgInst>(I);
342       if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
343           ACXI->isVolatile())
344         if (Tracker->captured(U))
345           return;
346       break;
347     }
348     case Instruction::BitCast:
349     case Instruction::GetElementPtr:
350     case Instruction::PHI:
351     case Instruction::Select:
352     case Instruction::AddrSpaceCast:
353       // The original value is not captured via this if the new value isn't.
354       AddUses(I);
355       break;
356     case Instruction::ICmp: {
357       unsigned Idx = (I->getOperand(0) == V) ? 0 : 1;
358       unsigned OtherIdx = 1 - Idx;
359       if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
360         // Don't count comparisons of a no-alias return value against null as
361         // captures. This allows us to ignore comparisons of malloc results
362         // with null, for example.
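        // E.g. (illustrative IR):
        //   %p = call noalias i8* @malloc(i64 16)
        //   %isnull = icmp eq i8* %p, null   ; not treated as a capture of %p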
363         if (CPN->getType()->getAddressSpace() == 0)
364           if (isNoAliasCall(V->stripPointerCasts()))
365             break;
366         if (!I->getFunction()->nullPointerIsDefined()) {
367           auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
368           // Comparing a dereferenceable_or_null pointer against null cannot
369           // lead to pointer escapes, because if it is not null it must be a
370           // valid (in-bounds) pointer.
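          // E.g. (illustrative IR), for an argument declared as
          //   define void @f(i8* dereferenceable_or_null(4) %p)
          // the comparison 'icmp eq i8* %p, null' reveals only whether %p is
          // null, not any other bits of its value.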
          if (Tracker->isDereferenceableOrNull(O,
                                               I->getModule()->getDataLayout()))
372             break;
373         }
374       }
      // Comparison against a value loaded from a global variable. Given that
      // the pointer does not otherwise escape, its value cannot have been
      // guessed and stored separately in a global variable.
378       auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
379       if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
380         break;
381       // Otherwise, be conservative. There are crazy ways to capture pointers
382       // using comparisons.
383       if (Tracker->captured(U))
384         return;
385       break;
386     }
387     default:
388       // Something else - be conservative and say it is captured.
389       if (Tracker->captured(U))
390         return;
391       break;
392     }
393   }
394 
395   // All uses examined.
396 }
397