1 //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file contains routines that help determine which pointers are captured.
10 // A pointer value is captured if the function makes a copy of any part of the
11 // pointer that outlives the call.  Not being captured means, more or less, that
12 // the pointer is only dereferenced and not stored in a global.  Returning part
13 // of the pointer as the function return value may or may not count as capturing
14 // the pointer, depending on the context.
15 //
16 //===----------------------------------------------------------------------===//
17 
18 #include "llvm/Analysis/CaptureTracking.h"
19 #include "llvm/ADT/SmallSet.h"
20 #include "llvm/ADT/SmallVector.h"
21 #include "llvm/ADT/Statistic.h"
22 #include "llvm/Analysis/AliasAnalysis.h"
23 #include "llvm/Analysis/CFG.h"
24 #include "llvm/Analysis/ValueTracking.h"
25 #include "llvm/IR/Constants.h"
26 #include "llvm/IR/Dominators.h"
27 #include "llvm/IR/Instructions.h"
28 #include "llvm/IR/IntrinsicInst.h"
29 #include "llvm/Support/CommandLine.h"
30 
31 using namespace llvm;
32 
33 #define DEBUG_TYPE "capture-tracking"
34 
// Outcome counters for capture analysis. The "-Before" counters are bumped by
// the instruction-bounded queries (PointerMayBeCapturedBefore and
// FindEarliestCapture); the plain counters by whole-function queries.
STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for MaxUsesToExplore argument. It's relatively small to
/// keep the cost of analysis reasonable for clients like BasicAliasAnalysis,
/// where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version can use much higher limit or
/// don't have this cap at all.
static cl::opt<unsigned>
DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                        cl::desc("Maximal number of uses to explore."),
                        cl::init(20));
50 
51 unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
52   return DefaultMaxUsesToExplore;
53 }
54 
// Out-of-line default definition of the virtual destructor; presumably acts
// as the key function anchoring CaptureTracker's vtable in this TU — confirm
// against the header.
CaptureTracker::~CaptureTracker() = default;
56 
57 bool CaptureTracker::shouldExplore(const Use *U) { return true; }
58 
59 bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
60   // An inbounds GEP can either be a valid pointer (pointing into
61   // or to the end of an allocation), or be null in the default
62   // address space. So for an inbounds GEP there is no way to let
63   // the pointer escape using clever GEP hacking because doing so
64   // would make the pointer point outside of the allocated object
65   // and thus make the GEP result a poison value. Similarly, other
66   // dereferenceable pointers cannot be manipulated without producing
67   // poison.
68   if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
69     if (GEP->isInBounds())
70       return true;
71   bool CanBeNull, CanBeFreed;
72   return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
73 }
74 
75 namespace {
76   struct SimpleCaptureTracker : public CaptureTracker {
77     explicit SimpleCaptureTracker(bool ReturnCaptures)
78         : ReturnCaptures(ReturnCaptures) {}
79 
80     void tooManyUses() override { Captured = true; }
81 
82     bool captured(const Use *U) override {
83       if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
84         return false;
85 
86       Captured = true;
87       return true;
88     }
89 
90     bool ReturnCaptures;
91 
92     bool Captured = false;
93   };
94 
95   /// Only find pointer captures which happen before the given instruction. Uses
96   /// the dominator tree to determine whether one instruction is before another.
97   /// Only support the case where the Value is defined in the same basic block
98   /// as the given instruction and the use.
99   struct CapturesBefore : public CaptureTracker {
100 
101     CapturesBefore(bool ReturnCaptures, const Instruction *I,
102                    const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
103         : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
104           IncludeI(IncludeI), LI(LI) {}
105 
106     void tooManyUses() override { Captured = true; }
107 
108     bool isSafeToPrune(Instruction *I) {
109       if (BeforeHere == I)
110         return !IncludeI;
111 
112       // We explore this usage only if the usage can reach "BeforeHere".
113       // If use is not reachable from entry, there is no need to explore.
114       if (!DT->isReachableFromEntry(I->getParent()))
115         return true;
116 
117       // Check whether there is a path from I to BeforeHere.
118       return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
119     }
120 
121     bool captured(const Use *U) override {
122       Instruction *I = cast<Instruction>(U->getUser());
123       if (isa<ReturnInst>(I) && !ReturnCaptures)
124         return false;
125 
126       // Check isSafeToPrune() here rather than in shouldExplore() to avoid
127       // an expensive reachability query for every instruction we look at.
128       // Instead we only do one for actual capturing candidates.
129       if (isSafeToPrune(I))
130         return false;
131 
132       Captured = true;
133       return true;
134     }
135 
136     const Instruction *BeforeHere;
137     const DominatorTree *DT;
138 
139     bool ReturnCaptures;
140     bool IncludeI;
141 
142     bool Captured = false;
143 
144     const LoopInfo *LI;
145   };
146 
147   /// Find the 'earliest' instruction before which the pointer is known not to
148   /// be captured. Here an instruction A is considered earlier than instruction
149   /// B, if A dominates B. If 2 escapes do not dominate each other, the
  /// terminator of the common dominator is chosen. If not all uses can be
151   /// analyzed, the earliest escape is set to the first instruction in the
152   /// function entry block.
153   // NOTE: Users have to make sure instructions compared against the earliest
154   // escape are not in a cycle.
  struct EarliestCaptures : public CaptureTracker {

    EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT)
        : DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

    // Too many uses to analyze: conservatively place the earliest capture at
    // the very first instruction of the function's entry block.
    void tooManyUses() override {
      Captured = true;
      EarliestCapture = &*F.getEntryBlock().begin();
    }

    bool captured(const Use *U) override {
      Instruction *I = cast<Instruction>(U->getUser());
      // Returns only count as captures when the caller requested it.
      if (isa<ReturnInst>(I) && !ReturnCaptures)
        return false;

      if (!EarliestCapture) {
        // First capture seen; it is trivially the earliest so far.
        EarliestCapture = I;
      } else if (EarliestCapture->getParent() == I->getParent()) {
        // Same block: instruction order within the block decides.
        if (I->comesBefore(EarliestCapture))
          EarliestCapture = I;
      } else {
        // Different blocks: use dominance to order the two captures.
        BasicBlock *CurrentBB = I->getParent();
        BasicBlock *EarliestBB = EarliestCapture->getParent();
        if (DT.dominates(EarliestBB, CurrentBB)) {
          // EarliestCapture already comes before the current use.
        } else if (DT.dominates(CurrentBB, EarliestBB)) {
          EarliestCapture = I;
        } else {
          // Otherwise find the nearest common dominator and use its terminator.
          auto *NearestCommonDom =
              DT.findNearestCommonDominator(CurrentBB, EarliestBB);
          EarliestCapture = NearestCommonDom->getTerminator();
        }
      }
      Captured = true;

      // Return false to continue analysis; we need to see all potential
      // captures.
      return false;
    }

    // Earliest instruction before which the pointer is known not to be
    // captured; null until the first capture is recorded.
    Instruction *EarliestCapture = nullptr;

    const DominatorTree &DT;

    bool ReturnCaptures;

    bool Captured = false;

    Function &F;
  };
206 }
207 
208 /// PointerMayBeCaptured - Return true if this pointer value may be captured
209 /// by the enclosing function (which is required to exist).  This routine can
210 /// be expensive, so consider caching the results.  The boolean ReturnCaptures
211 /// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
213 /// storing the value (or part of it) into memory anywhere automatically
214 /// counts as capturing it or not.
215 bool llvm::PointerMayBeCaptured(const Value *V,
216                                 bool ReturnCaptures, bool StoreCaptures,
217                                 unsigned MaxUsesToExplore) {
218   assert(!isa<GlobalValue>(V) &&
219          "It doesn't make sense to ask whether a global is captured.");
220 
221   // TODO: If StoreCaptures is not true, we could do Fancy analysis
222   // to determine whether this store is not actually an escape point.
223   // In that case, BasicAliasAnalysis should be updated as well to
224   // take advantage of this.
225   (void)StoreCaptures;
226 
227   SimpleCaptureTracker SCT(ReturnCaptures);
228   PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
229   if (SCT.Captured)
230     ++NumCaptured;
231   else
232     ++NumNotCaptured;
233   return SCT.Captured;
234 }
235 
236 /// PointerMayBeCapturedBefore - Return true if this pointer value may be
237 /// captured by the enclosing function (which is required to exist). If a
238 /// DominatorTree is provided, only captures which happen before the given
239 /// instruction are considered. This routine can be expensive, so consider
240 /// caching the results.  The boolean ReturnCaptures specifies whether
241 /// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
243 /// (or part of it) into memory anywhere automatically counts as capturing it
244 /// or not.
245 bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
246                                       bool StoreCaptures, const Instruction *I,
247                                       const DominatorTree *DT, bool IncludeI,
248                                       unsigned MaxUsesToExplore,
249                                       const LoopInfo *LI) {
250   assert(!isa<GlobalValue>(V) &&
251          "It doesn't make sense to ask whether a global is captured.");
252 
253   if (!DT)
254     return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
255                                 MaxUsesToExplore);
256 
257   // TODO: See comment in PointerMayBeCaptured regarding what could be done
258   // with StoreCaptures.
259 
260   CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
261   PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
262   if (CB.Captured)
263     ++NumCapturedBefore;
264   else
265     ++NumNotCapturedBefore;
266   return CB.Captured;
267 }
268 
269 Instruction *llvm::FindEarliestCapture(const Value *V, Function &F,
270                                        bool ReturnCaptures, bool StoreCaptures,
271                                        const DominatorTree &DT,
272                                        unsigned MaxUsesToExplore) {
273   assert(!isa<GlobalValue>(V) &&
274          "It doesn't make sense to ask whether a global is captured.");
275 
276   EarliestCaptures CB(ReturnCaptures, F, DT);
277   PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
278   if (CB.Captured)
279     ++NumCapturedBefore;
280   else
281     ++NumNotCapturedBefore;
282   return CB.EarliestCapture;
283 }
284 
/// Classify how the single use \p U may capture the pointer it holds:
/// NO_CAPTURE (this use cannot capture), MAY_CAPTURE (conservatively treated
/// as a capture), or PASSTHROUGH (the user produces a derived pointer whose
/// own uses must be examined instead). \p IsDereferenceableOrNull optionally
/// refines the icmp-against-null case for dereferenceable_or_null pointers.
UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = cast<Instruction>(U.getUser());

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    // (Operand 0 is the stored value; operand 1 is the address.)
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    // (Operand 1 is the operand value; operand 0 is the address.)
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    // (Operands 1 and 2 are the compare/new values; operand 0 is the address.)
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::BitCast:
  case Instruction::GetElementPtr:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }
    // Comparison against value stored in global variable. Given the pointer
    // does not escape, its value cannot be guessed and stored separately in a
    // global variable.
    auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
    if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
      return UseCaptureKind::NO_CAPTURE;
    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}
410 
411 void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
412                                 unsigned MaxUsesToExplore) {
413   assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
414   if (MaxUsesToExplore == 0)
415     MaxUsesToExplore = DefaultMaxUsesToExplore;
416 
417   SmallVector<const Use *, 20> Worklist;
418   Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
419   SmallSet<const Use *, 20> Visited;
420 
421   auto AddUses = [&](const Value *V) {
422     unsigned Count = 0;
423     for (const Use &U : V->uses()) {
424       // If there are lots of uses, conservatively say that the value
425       // is captured to avoid taking too much compile time.
426       if (Count++ >= MaxUsesToExplore) {
427         Tracker->tooManyUses();
428         return false;
429       }
430       if (!Visited.insert(&U).second)
431         continue;
432       if (!Tracker->shouldExplore(&U))
433         continue;
434       Worklist.push_back(&U);
435     }
436     return true;
437   };
438   if (!AddUses(V))
439     return;
440 
441   auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
442     return Tracker->isDereferenceableOrNull(V, DL);
443   };
444   while (!Worklist.empty()) {
445     const Use *U = Worklist.pop_back_val();
446     switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
447     case UseCaptureKind::NO_CAPTURE:
448       continue;
449     case UseCaptureKind::MAY_CAPTURE:
450       if (Tracker->captured(U))
451         return;
452       continue;
453     case UseCaptureKind::PASSTHROUGH:
454       if (!AddUses(U->getUser()))
455         return;
456       continue;
457     }
458   }
459 
460   // All uses examined.
461 }
462 
463 bool llvm::isNonEscapingLocalObject(
464     const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
465   SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
466   if (IsCapturedCache) {
467     bool Inserted;
468     std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
469     if (!Inserted)
470       // Found cached result, return it!
471       return CacheIt->second;
472   }
473 
474   // If this is an identified function-local object, check to see if it escapes.
475   if (isIdentifiedFunctionLocal(V)) {
476     // Set StoreCaptures to True so that we can assume in our callers that the
477     // pointer is not the result of a load instruction. Currently
478     // PointerMayBeCaptured doesn't have any special analysis for the
479     // StoreCaptures=false case; if it did, our callers could be refined to be
480     // more precise.
481     auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
482     if (IsCapturedCache)
483       CacheIt->second = Ret;
484     return Ret;
485   }
486 
487   return false;
488 }
489