//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call. Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global. Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like
/// BasicAliasAnalysis, where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
38 static cl::opt<unsigned> 39 DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden, 40 cl::desc("Maximal number of uses to explore."), 41 cl::init(20)); 42 43 unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() { 44 return DefaultMaxUsesToExplore; 45 } 46 47 CaptureTracker::~CaptureTracker() {} 48 49 bool CaptureTracker::shouldExplore(const Use *U) { return true; } 50 51 bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) { 52 // An inbounds GEP can either be a valid pointer (pointing into 53 // or to the end of an allocation), or be null in the default 54 // address space. So for an inbounds GEP there is no way to let 55 // the pointer escape using clever GEP hacking because doing so 56 // would make the pointer point outside of the allocated object 57 // and thus make the GEP result a poison value. Similarly, other 58 // dereferenceable pointers cannot be manipulated without producing 59 // poison. 60 if (auto *GEP = dyn_cast<GetElementPtrInst>(O)) 61 if (GEP->isInBounds()) 62 return true; 63 bool CanBeNull; 64 return O->getPointerDereferenceableBytes(DL, CanBeNull); 65 } 66 67 namespace { 68 struct SimpleCaptureTracker : public CaptureTracker { 69 explicit SimpleCaptureTracker(bool ReturnCaptures) 70 : ReturnCaptures(ReturnCaptures), Captured(false) {} 71 72 void tooManyUses() override { Captured = true; } 73 74 bool captured(const Use *U) override { 75 if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures) 76 return false; 77 78 Captured = true; 79 return true; 80 } 81 82 bool ReturnCaptures; 83 84 bool Captured; 85 }; 86 87 /// Only find pointer captures which happen before the given instruction. Uses 88 /// the dominator tree to determine whether one instruction is before another. 89 /// Only support the case where the Value is defined in the same basic block 90 /// as the given instruction and the use. 
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I,
                 const DominatorTree *DT, bool IncludeI)
      : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
        IncludeI(IncludeI), Captured(false) {}

  void tooManyUses() override { Captured = true; }

  /// Return true if the use at \p I may be skipped entirely: based on the
  /// dominator tree and CFG reachability, a capture at \p I could not be
  /// observed at BeforeHere.
  bool isSafeToPrune(Instruction *I) {
    BasicBlock *BB = I->getParent();
    // We explore this usage only if the usage can reach "BeforeHere".
    // If use is not reachable from entry, there is no need to explore.
    if (BeforeHere != I && !DT->isReachableFromEntry(BB))
      return true;

    // Compute the case where both instructions are inside the same basic
    // block.
    if (BB == BeforeHere->getParent()) {
      // 'I' dominates 'BeforeHere' => not safe to prune.
      //
      // The value defined by an invoke dominates an instruction only
      // if it dominates every instruction in UseBB. A PHI is dominated only
      // if the instruction dominates every possible use in the UseBB. Since
      // UseBB == BB, avoid pruning.
      if (isa<InvokeInst>(BeforeHere) || isa<PHINode>(I) || I == BeforeHere)
        return false;
      // comesBefore is valid here because the guard above established that
      // both instructions are in the same block.
      if (!BeforeHere->comesBefore(I))
        return false;

      // 'BeforeHere' comes before 'I', it's safe to prune if we also
      // guarantee that 'I' never reaches 'BeforeHere' through a back-edge or
      // by its successors, i.e., prune if:
      //
      // (1) BB is an entry block or has no successors.
      // (2) There's no path coming back through BB successors.
      if (BB == &BB->getParent()->getEntryBlock() ||
          !BB->getTerminator()->getNumSuccessors())
        return true;

      // Otherwise fall back to a CFG reachability query seeded with BB's
      // successors.
      SmallVector<BasicBlock *, 32> Worklist;
      Worklist.append(succ_begin(BB), succ_end(BB));
      return !isPotentiallyReachableFromMany(Worklist, BB, nullptr, DT);
    }

    // If the value is defined in the same basic block as use and BeforeHere,
    // there is no need to explore the use if BeforeHere dominates use.
    // Check whether there is a path from I to BeforeHere.
    if (BeforeHere != I && DT->dominates(BeforeHere, I) &&
        !isPotentiallyReachable(I, BeforeHere, nullptr, DT))
      return true;

    return false;
  }

  bool shouldExplore(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());

    // Skip the query instruction itself unless the caller asked for it to be
    // included.
    if (BeforeHere == I && !IncludeI)
      return false;

    if (isSafeToPrune(I))
      return false;

    return true;
  }

  bool captured(const Use *U) override {
    // Returning the pointer counts as a capture only when requested.
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    Captured = true;
    return true;
  }

  const Instruction *BeforeHere;
  const DominatorTree *DT;

  bool ReturnCaptures;
  bool IncludeI;

  bool Captured;
};
} // namespace

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist). This routine can
/// be expensive, so consider caching the results. The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not. The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V,
                                bool ReturnCaptures, bool StoreCaptures,
                                unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do Fancy analysis
  // to determine whether this store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  // StoreCaptures is currently unused; see the TODO above.
  (void)StoreCaptures;

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  return SCT.Captured;
}

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results. The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not. The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // Without a dominator tree we cannot order instructions, so fall back to
  // the unordered query.
  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  return CB.Captured;
}

/// Worklist-driven traversal of the uses of \p V, reporting each potentially
/// capturing use to \p Tracker. A MaxUsesToExplore of 0 selects the
/// command-line default.
void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  // Enqueue every not-yet-visited use of V that the tracker wants explored;
  // give up via tooManyUses() once the exploration limit is exceeded.
  auto AddUses = [&](const Value *V) {
    unsigned Count = 0;
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Count++ >= MaxUsesToExplore)
        return Tracker->tooManyUses();
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
  };
  AddUses(V);

  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    Instruction *I = cast<Instruction>(U->getUser());
    // V now names the value flowing into this use; it may be a value derived
    // from the original pointer (e.g. via a cast or GEP enqueued below).
    V = U->get();

    switch (I->getOpcode()) {
    case Instruction::Call:
    case Instruction::Invoke: {
      auto *Call = cast<CallBase>(I);
      // Not captured if the callee is readonly, doesn't return a copy through
      // its return value and doesn't unwind (a readonly function can leak bits
      // by throwing an exception or not depending on the input value).
      if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
          Call->getType()->isVoidTy())
        break;

      // The pointer is not captured if returned pointer is not captured.
      // NOTE: CaptureTracking users should not assume that only functions
      // marked with nocapture do not capture. This means that places like
      // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
      // in BasicAA also need to know about this property.
      // This call returns an alias of its pointer argument without capturing
      // it, so chase the uses of the returned pointer instead of reporting a
      // capture here.
      if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call,
                                                                      true)) {
        AddUses(Call);
        break;
      }

      // Volatile operations effectively capture the memory location that they
      // load and store to.
      if (auto *MI = dyn_cast<MemIntrinsic>(Call))
        if (MI->isVolatile())
          if (Tracker->captured(U))
            return;

      // Not captured if only passed via 'nocapture' arguments. Note that
      // calling a function pointer does not in itself cause the pointer to
      // be captured. This is a subtle point considering that (for example)
      // the callee might return its own address. It is analogous to saying
      // that loading a value from a pointer does not cause the pointer to be
      // captured, even though the loaded value might be the pointer itself
      // (think of self-referential objects).
      for (auto IdxOpPair : enumerate(Call->data_ops())) {
        int Idx = IdxOpPair.index();
        Value *A = IdxOpPair.value();
        if (A == V && !Call->doesNotCapture(Idx))
          // The parameter is not marked 'nocapture' - captured.
          if (Tracker->captured(U))
            return;
      }
      break;
    }
    case Instruction::Load:
      // Volatile loads make the address observable.
      if (cast<LoadInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::VAArg:
      // "va-arg" from a pointer does not cause it to be captured.
      break;
    case Instruction::Store:
      // Stored the pointer - conservatively assume it may be captured.
      // Volatile stores make the address observable.
      if (V == I->getOperand(0) || cast<StoreInst>(I)->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    case Instruction::AtomicRMW: {
      // atomicrmw conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ARMWI = cast<AtomicRMWInst>(I);
      if (ARMWI->getValOperand() == V || ARMWI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::AtomicCmpXchg: {
      // cmpxchg conceptually includes both a load and store from
      // the same location.
      // As with a store, the location being accessed is not captured,
      // but the value being stored is.
      // Volatile stores make the address observable.
      auto *ACXI = cast<AtomicCmpXchgInst>(I);
      if (ACXI->getCompareOperand() == V || ACXI->getNewValOperand() == V ||
          ACXI->isVolatile())
        if (Tracker->captured(U))
          return;
      break;
    }
    case Instruction::BitCast:
    case Instruction::GetElementPtr:
    case Instruction::PHI:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      // The original value is not captured via this if the new value isn't.
      AddUses(I);
      break;
    case Instruction::ICmp: {
      // Idx is the operand position of V in the compare; OtherIdx the other
      // operand.
      unsigned Idx = (I->getOperand(0) == V) ? 0 : 1;
      unsigned OtherIdx = 1 - Idx;
      if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
        // Don't count comparisons of a no-alias return value against null as
        // captures. This allows us to ignore comparisons of malloc results
        // with null, for example.
        if (CPN->getType()->getAddressSpace() == 0)
          if (isNoAliasCall(V->stripPointerCasts()))
            break;
        if (!I->getFunction()->nullPointerIsDefined()) {
          auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
          // Comparing a dereferenceable_or_null pointer against null cannot
          // lead to pointer escapes, because if it is not null it must be a
          // valid (in-bounds) pointer.
          if (Tracker->isDereferenceableOrNull(O, I->getModule()->getDataLayout()))
            break;
        }
      }
      // Comparison against value stored in global variable. Given the pointer
      // does not escape, its value cannot be guessed and stored separately in a
      // global variable.
      auto *LI = dyn_cast<LoadInst>(I->getOperand(OtherIdx));
      if (LI && isa<GlobalVariable>(LI->getPointerOperand()))
        break;
      // Otherwise, be conservative. There are crazy ways to capture pointers
      // using comparisons.
      if (Tracker->captured(U))
        return;
      break;
    }
    default:
      // Something else - be conservative and say it is captured.
      if (Tracker->captured(U))
        return;
      break;
    }
  }

  // All uses examined.
}

/// Return true if \p V is a local object (an alloca, a noalias call result,
/// or a byval/noalias argument) that does not escape the enclosing function.
/// When \p IsCapturedCache is provided, results are memoized in it.
bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
    // Otherwise a placeholder 'false' entry was just inserted; CacheIt stays
    // valid and is overwritten with the computed answer below.
  }

  // If this is a local allocation, check to see if it escapes.
  if (isa<AllocaInst>(V) || isNoAliasCall(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  // If this is an argument that corresponds to a byval or noalias argument,
  // then it has not escaped before entering the function. Check if it escapes
  // inside the function.
  if (const Argument *A = dyn_cast<Argument>(V))
    if (A->hasByValAttr() || A->hasNoAliasAttr()) {
      // Note even if the argument is marked nocapture, we still need to check
      // for copies made inside the function. The nocapture attribute only
      // specifies that there are no copies made that outlive the function.
      auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
      if (IsCapturedCache)
        CacheIt->second = Ret;
      return Ret;
    }

  return false;
}