1 //===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file contains routines that help determine which pointers are captured. 10 // A pointer value is captured if the function makes a copy of any part of the 11 // pointer that outlives the call. Not being captured means, more or less, that 12 // the pointer is only dereferenced and not stored in a global. Returning part 13 // of the pointer as the function return value may or may not count as capturing 14 // the pointer, depending on the context. 15 // 16 //===----------------------------------------------------------------------===// 17 18 #include "llvm/Analysis/CaptureTracking.h" 19 #include "llvm/ADT/SmallSet.h" 20 #include "llvm/ADT/SmallVector.h" 21 #include "llvm/ADT/Statistic.h" 22 #include "llvm/Analysis/AliasAnalysis.h" 23 #include "llvm/Analysis/CFG.h" 24 #include "llvm/Analysis/ValueTracking.h" 25 #include "llvm/IR/Constants.h" 26 #include "llvm/IR/Dominators.h" 27 #include "llvm/IR/Instructions.h" 28 #include "llvm/IR/IntrinsicInst.h" 29 #include "llvm/Support/CommandLine.h" 30 31 using namespace llvm; 32 33 #define DEBUG_TYPE "capture-tracking" 34 35 STATISTIC(NumCaptured, "Number of pointers maybe captured"); 36 STATISTIC(NumNotCaptured, "Number of pointers not captured"); 37 STATISTIC(NumCapturedBefore, "Number of pointers maybe captured before"); 38 STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before"); 39 40 /// The default value for MaxUsesToExplore argument. It's relatively small to 41 /// keep the cost of analysis reasonable for clients like BasicAliasAnalysis, 42 /// where the results can't be cached. 
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
static cl::opt<unsigned>
DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                        cl::desc("Maximal number of uses to explore."),
                        cl::init(20));

/// Return the current value of the -capture-tracking-max-uses-to-explore
/// option so callers can pass the default cap explicitly.
unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

// Out-of-line virtual destructor: anchors CaptureTracker's vtable here.
CaptureTracker::~CaptureTracker() {}

// Default policy: every use is worth exploring. Subclasses override this to
// prune the traversal early.
bool CaptureTracker::shouldExplore(const Use *U) { return true; }

/// Return true if \p O is known to be dereferenceable or null, meaning a
/// comparison of \p O against null cannot be used to smuggle out pointer bits.
bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // An inbounds GEP can either be a valid pointer (pointing into
  // or to the end of an allocation), or be null in the default
  // address space. So for an inbounds GEP there is no way to let
  // the pointer escape using clever GEP hacking because doing so
  // would make the pointer point outside of the allocated object
  // and thus make the GEP result a poison value. Similarly, other
  // dereferenceable pointers cannot be manipulated without producing
  // poison.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(O))
    if (GEP->isInBounds())
      return true;
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
/// Tracker for the basic "may this pointer be captured at all?" query: any
/// capturing use (optionally ignoring returns) latches Captured to true.
struct SimpleCaptureTracker : public CaptureTracker {
  explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures), Captured(false) {}

  // Hitting the use-exploration budget is conservatively a capture.
  void tooManyUses() override { Captured = true; }

  bool captured(const Use *U) override {
    // Returning the pointer only counts as a capture when the client asked
    // for returns to be treated that way.
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    Captured = true;
    return true;
  }

  // Whether a 'ret' of the pointer counts as a capture.
  bool ReturnCaptures;

  // Result: set once any capturing use has been seen.
  bool Captured;
};

/// Only find pointer captures which happen before the given instruction. Uses
/// the dominator tree to determine whether one instruction is before another.
/// Only support the case where the Value is defined in the same basic block
/// as the given instruction and the use.
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I, const DominatorTree *DT,
                 bool IncludeI)
      : BeforeHere(I), DT(DT),
        ReturnCaptures(ReturnCaptures), IncludeI(IncludeI), Captured(false) {}

  // Exceeding the use-exploration budget is conservatively a capture.
  void tooManyUses() override { Captured = true; }

  /// Return true if the potentially-capturing use at \p I provably cannot
  /// execute before BeforeHere, so the use may be ignored ("pruned").
  bool isSafeToPrune(Instruction *I) {
    if (BeforeHere == I)
      return !IncludeI;

    BasicBlock *BB = I->getParent();
    // We explore this usage only if the usage can reach "BeforeHere".
    // If use is not reachable from entry, there is no need to explore.
    if (!DT->isReachableFromEntry(BB))
      return true;

    // Compute the case where both instructions are inside the same basic
    // block.
    if (BB == BeforeHere->getParent()) {
      // 'I' dominates 'BeforeHere' => not safe to prune.
      //
      // The value defined by an invoke dominates an instruction only
      // if it dominates every instruction in UseBB. A PHI is dominated only
      // if the instruction dominates every possible use in the UseBB. Since
      // UseBB == BB, avoid pruning.
      if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I))
        return false;
      if (!BeforeHere->comesBefore(I))
        return false;

      // 'BeforeHere' comes before 'I', it's safe to prune if we also
      // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
      // by its successors, i.e, prune if:
      //
      // (1) BB is an entry block or have no successors.
      // (2) There's no path coming back through BB successors.
      if (BB->isEntryBlock() || !BB->getTerminator()->getNumSuccessors())
        return true;

      // Otherwise, check whether any successor of BB can loop back into BB.
      SmallVector<BasicBlock*, 32> Worklist;
      Worklist.append(succ_begin(BB), succ_end(BB));
      return !isPotentiallyReachableFromMany(Worklist, BB, nullptr, DT);
    }

    // If the value is defined in the same basic block as use and BeforeHere,
    // there is no need to explore the use if BeforeHere dominates use.
    // Check whether there is a path from I to BeforeHere.
    if (DT->dominates(BeforeHere, I) &&
        !isPotentiallyReachable(I, BeforeHere, nullptr, DT))
      return true;

    return false;
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    // Check isSafeToPrune() here rather than in shouldExplore() to avoid
    // an expensive reachability query for every instruction we look at.
    // Instead we only do one for actual capturing candidates.
    if (isSafeToPrune(I))
      return false;

    Captured = true;
    return true;
  }

  // Only captures that can happen before this instruction are counted.
  const Instruction *BeforeHere;
  const DominatorTree *DT;

  // Whether a 'ret' of the pointer counts as a capture.
  bool ReturnCaptures;
  // Whether a capture by BeforeHere itself counts.
  bool IncludeI;

  // Result: set once a non-prunable capturing use has been seen.
  bool Captured;
};
} // namespace

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do Fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else
    ++NumNotCaptured;
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // Without a dominator tree we cannot order instructions, so fall back to
  // the unordered query.
  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}

/// Worklist-driven core of capture tracking: walk the (transitive) uses of
/// \p V, classifying each user instruction and reporting capturing uses to
/// \p Tracker. Exploration stops as soon as Tracker->captured() returns true,
/// or when the use budget is exhausted (Tracker->tooManyUses()).
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  // MaxUsesToExplore == 0 means "use the command-line default".
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  // Queue the uses of a value, returning false if the per-value use budget
  // was exceeded (Tracker->tooManyUses() has already been signalled then).
  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      auto *Call = cast<CallBase>(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
          Call->getType()->isVoidTy())
        break;

      // The pointer is not captured if returned pointer is not captured.
      // NOTE: CaptureTracking users should not assume that only functions
      // marked with nocapture do not capture. This means that places like
      // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
      // in BasicAA also need to know about this property.
      if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call,
                                                                      true)) {
        // Chase the uses of the returned pointer instead.
        if (!AddUses(Call))
          return;
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(Call))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments. Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured. This is a subtle point considering that (for example)
      // the callee might return its own address. It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      if (Call->isDataOperand(U) &&
          !Call->doesNotCapture(Call->getDataOperandNo(U))) {
        // The parameter is not marked 'nocapture' - captured.
        if (Tracker->captured(U))
          return;
      }
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (U->getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (U->getOperandNo() == 1 || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (U->getOperandNo() == 1 || U->getOperandNo() == 2 ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      if (!AddUses(I))
        return;
      break;
    case Instruction::ICmp: {
      unsigned Idx = U->getOperandNo();
      unsigned OtherIdx = 1 - Idx;
      if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
        // Don't count comparisons of a no-alias return value against null as
        // captures. This allows us to ignore comparisons of malloc results
        // with null, for example.
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(U->get()->stripPointerCasts()))
            break;
        if (!I->getFunction()->nullPointerIsDefined()) {
          auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
          // Comparing a dereferenceable_or_null pointer against null cannot
          // lead to pointer escapes, because if it is not null it must be a
          // valid (in-bounds) pointer.
          if (Tracker->isDereferenceableOrNull(O, I->getModule()->getDataLayout()))
            break;
        }
      }
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}

/// Return true if \p V is an identified function-local object (e.g. alloca or
/// noalias call) that is never captured, optionally consulting/updating the
/// provided per-query cache.
bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    // Insert a tentative 'false' so re-entrant queries on V terminate.
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}