//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
            "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
            "The # of aborted paths due to reaching the maximum block count in "
            "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
            "The # of aborted paths due to reaching the maximum block count in "
            "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
            "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext ensures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
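      // For example, if a member function is the top-level entry point of the
      // analysis, loads through 'this' in its body are not reported as null
      // dereferences, because of the assumption added below.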
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we don't
    // try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer region,
  // start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// processAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                      Explicits, Regions, Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
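  // For example, in 'x + y' the values of 'x' and 'y' are consumed by the
  // enclosing BinaryOperator, so their bindings must stay live until the
  // parent expression has been evaluated.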
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they can query the
    // values of the soon-to-be-dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
          I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run the destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
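    // (Control-flow statements such as IfStmt or WhileStmt are modeled by the
    // CFG's block structure and terminators rather than visited as statements
    // here.)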
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
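      // (The linearized CFG already contains the subexpressions of the
      // ExprWithCleanups as separate elements, so there is nothing left to
      // evaluate when the wrapper itself is reached.)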
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State =
              createTemporaryRegionIfNeeded(State, LCtx,
                                            cast<Expr>(S),
                                            cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
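      // For example, in 'a == b' where 'operator==' is a member function,
      // argument 0 is the implicit object argument, and it may need a
      // temporary region if it is not already a location.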
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix
    // the CFG to not model them as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
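      // Generating a sink node ends exploration of this path, which is how
      // the 'abort' behavior is modeled.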
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose value
// has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.
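  // For example, given 'if (a && b)', the CFG may evaluate 'a' in an earlier
  // block, so the block containing the branch only evaluates 'b' (the
  // rightmost leaf); in that case 'b', not the full 'a && b' expression, is
  // the condition whose value is cached in this block.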
1563 1564 // The invariants are still shifting, but it is possible that the 1565 // last element in a CFGBlock is not a CFGStmt. Look for the last 1566 // CFGStmt as the value of the condition. 1567 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1568 for (; I != E; ++I) { 1569 CFGElement Elem = *I; 1570 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1571 if (!CS) 1572 continue; 1573 const Stmt *LastStmt = CS->getStmt(); 1574 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1575 return LastStmt; 1576 } 1577 llvm_unreachable("could not resolve condition"); 1578 } 1579 1580 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1581 NodeBuilderContext& BldCtx, 1582 ExplodedNode *Pred, 1583 ExplodedNodeSet &Dst, 1584 const CFGBlock *DstT, 1585 const CFGBlock *DstF) { 1586 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1587 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1588 const LocationContext *LCtx = Pred->getLocationContext(); 1589 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1590 currBldrCtx = &BldCtx; 1591 1592 // Check for NULL conditions; e.g. "for(;;)" 1593 if (!Condition) { 1594 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1595 NullCondBldr.markInfeasible(false); 1596 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1597 return; 1598 } 1599 1600 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1601 Condition = Ex->IgnoreParens(); 1602 1603 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1604 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1605 Condition->getLocStart(), 1606 "Error evaluating branch"); 1607 1608 ExplodedNodeSet CheckersOutSet; 1609 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1610 Pred, *this); 1611 // We generated only sinks. 1612 if (CheckersOutSet.empty()) 1613 return; 1614 1615 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1616 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1617 E = CheckersOutSet.end(); E != I; ++I) { 1618 ExplodedNode *PredI = *I; 1619 1620 if (PredI->isSink()) 1621 continue; 1622 1623 ProgramStateRef PrevState = PredI->getState(); 1624 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1625 1626 if (X.isUnknownOrUndef()) { 1627 // Give it a chance to recover from unknown. 1628 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1629 if (Ex->getType()->isIntegralOrEnumerationType()) { 1630 // Try to recover some path-sensitivity. Right now casts of symbolic 1631 // integers that promote their values are currently not tracked well. 1632 // If 'Condition' is such an expression, try and recover the 1633 // underlying value and use that instead. 1634 SVal recovered = RecoverCastedSymbol(getStateManager(), 1635 PrevState, Condition, 1636 PredI->getLocationContext(), 1637 getContext()); 1638 1639 if (!recovered.isUnknown()) { 1640 X = recovered; 1641 } 1642 } 1643 } 1644 } 1645 1646 // If the condition is still unknown, give up. 1647 if (X.isUnknownOrUndef()) { 1648 builder.generateNode(PrevState, true, PredI); 1649 builder.generateNode(PrevState, false, PredI); 1650 continue; 1651 } 1652 1653 DefinedSVal V = X.castAs<DefinedSVal>(); 1654 1655 ProgramStateRef StTrue, StFalse; 1656 std::tie(StTrue, StFalse) = PrevState->assume(V); 1657 1658 // Process the true branch. 
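// For either branch below, a null state returned by assume() means the
// constraint manager found that branch infeasible given PrevState, so we mark
// it infeasible rather than generate a successor node for it.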
1659 if (builder.isFeasible(true)) { 1660 if (StTrue) 1661 builder.generateNode(StTrue, true, PredI); 1662 else 1663 builder.markInfeasible(true); 1664 } 1665 1666 // Process the false branch. 1667 if (builder.isFeasible(false)) { 1668 if (StFalse) 1669 builder.generateNode(StFalse, false, PredI); 1670 else 1671 builder.markInfeasible(false); 1672 } 1673 } 1674 currBldrCtx = nullptr; 1675 } 1676 1677 /// The GDM component containing the set of global variables which have been 1678 /// previously initialized with explicit initializers. 1679 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1680 llvm::ImmutableSet<const VarDecl *>) 1681 1682 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1683 NodeBuilderContext &BuilderCtx, 1684 ExplodedNode *Pred, 1685 clang::ento::ExplodedNodeSet &Dst, 1686 const CFGBlock *DstT, 1687 const CFGBlock *DstF) { 1688 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1689 currBldrCtx = &BuilderCtx; 1690 1691 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1692 ProgramStateRef state = Pred->getState(); 1693 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1694 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1695 1696 if (!initHasRun) { 1697 state = state->add<InitializedGlobalsSet>(VD); 1698 } 1699 1700 builder.generateNode(state, initHasRun, Pred); 1701 builder.markInfeasible(!initHasRun); 1702 1703 currBldrCtx = nullptr; 1704 } 1705 1706 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1707 /// nodes by processing the 'effects' of a computed goto jump. 1708 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1709 1710 ProgramStateRef state = builder.getState(); 1711 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1712 1713 // Three possibilities: 1714 // 1715 // (1) We know the computed label. 1716 // (2) The label is NULL (or some other constant), or Undefined. 1717 // (3) We have no clue about the label. Dispatch to all targets. 1718 // 1719 1720 typedef IndirectGotoNodeBuilder::iterator iterator; 1721 1722 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1723 const LabelDecl *L = LV->getLabel(); 1724 1725 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1726 if (I.getLabel() == L) { 1727 builder.generateNode(I, state); 1728 return; 1729 } 1730 } 1731 1732 llvm_unreachable("No block with label."); 1733 } 1734 1735 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1736 // Dispatch to the first target and mark it as a sink. 1737 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1738 // FIXME: add checker visit. 1739 // UndefBranches.insert(N); 1740 return; 1741 } 1742 1743 // This is really a catch-all. We don't support symbolics yet. 1744 // FIXME: Implement dispatch for symbolic pointers. 
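// For example (illustrative user code), given a GNU computed goto such as
//   void f(int i) {
//     static void *targets[] = { &&L1, &&L2 };
//     goto *targets[i];
//   L1: return;
//   L2: return;
//   }
// an unconstrained 'i' leaves the jump target unknown to the analyzer, so we
// conservatively enqueue every label successor below.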
1745 1746 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1747 builder.generateNode(I, state); 1748 } 1749 1750 #if 0 1751 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1752 const StackFrameContext* Frame = Pred.getStackFrame(); 1753 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1754 Pred.getState()->get<InitializedTemporariesSet>(); 1755 return std::find_if(Set.begin(), Set.end(), 1756 [&](const CXXBindTemporaryContext &Ctx) { 1757 if (Ctx.second == Frame) { 1758 Ctx.first->dump(); 1759 llvm::errs() << "\n"; 1760 } 1761 return Ctx.second == Frame; 1762 }) == Set.end(); 1763 } 1764 #endif 1765 1766 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1767 ExplodedNode *Pred, 1768 ExplodedNodeSet &Dst, 1769 const BlockEdge &L) { 1770 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1771 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1772 } 1773 1774 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1775 /// nodes when the control reaches the end of a function. 1776 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1777 ExplodedNode *Pred) { 1778 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1779 // We currently cannot enable this assert, as lifetime extended temporaries 1780 // are not modelled correctly. 1781 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1782 StateMgr.EndPath(Pred->getState()); 1783 1784 ExplodedNodeSet Dst; 1785 if (Pred->getLocationContext()->inTopFrame()) { 1786 // Remove dead symbols. 1787 ExplodedNodeSet AfterRemovedDead; 1788 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1789 1790 // Notify checkers. 1791 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1792 E = AfterRemovedDead.end(); I != E; ++I) { 1793 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1794 } 1795 } else { 1796 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1797 } 1798 1799 Engine.enqueueEndOfFunction(Dst); 1800 } 1801 1802 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1803 /// nodes by processing the 'effects' of a switch statement. 1804 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1805 typedef SwitchNodeBuilder::iterator iterator; 1806 ProgramStateRef state = builder.getState(); 1807 const Expr *CondE = builder.getCondition(); 1808 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1809 1810 if (CondV_untested.isUndef()) { 1811 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1812 // FIXME: add checker 1813 //UndefBranches.insert(N); 1814 1815 return; 1816 } 1817 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1818 1819 ProgramStateRef DefaultSt = state; 1820 1821 iterator I = builder.begin(), EI = builder.end(); 1822 bool defaultIsFeasible = I == EI; 1823 1824 for ( ; I != EI; ++I) { 1825 // Successor may be pruned out during CFG construction. 1826 if (!I.getBlock()) 1827 continue; 1828 1829 const CaseStmt *Case = I.getCase(); 1830 1831 // Evaluate the LHS of the case value. 1832 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1833 assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType())); 1834 1835 // Get the RHS of the case, if it exists. 
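// (The RHS is only present for GNU case ranges, e.g. "case 1 ... 5:"; for an
// ordinary "case 3:" the range below degenerates to the single value V1.)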
1836 llvm::APSInt V2; 1837 if (const Expr *E = Case->getRHS()) 1838 V2 = E->EvaluateKnownConstInt(getContext()); 1839 else 1840 V2 = V1; 1841 1842 ProgramStateRef StateCase; 1843 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1844 std::tie(StateCase, DefaultSt) = 1845 DefaultSt->assumeWithinInclusiveRange(*NL, V1, V2); 1846 else // UnknownVal 1847 StateCase = DefaultSt; 1848 1849 if (StateCase) 1850 builder.generateCaseStmtNode(I, StateCase); 1851 1852 // Now "assume" that the case doesn't match. Add this state 1853 // to the default state (if it is feasible). 1854 if (DefaultSt) 1855 defaultIsFeasible = true; 1856 else { 1857 defaultIsFeasible = false; 1858 break; 1859 } 1860 } 1861 1862 if (!defaultIsFeasible) 1863 return; 1864 1865 // If we have switch(enum value), the default branch is not 1866 // feasible if all of the enum constants not covered by 'case:' statements 1867 // are not feasible values for the switch condition. 1868 // 1869 // Note that this isn't as accurate as it could be. Even if there isn't 1870 // a case for a particular enum value as long as that enum value isn't 1871 // feasible then it shouldn't be considered for making 'default:' reachable. 1872 const SwitchStmt *SS = builder.getSwitch(); 1873 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1874 if (CondExpr->getType()->getAs<EnumType>()) { 1875 if (SS->isAllEnumCasesCovered()) 1876 return; 1877 } 1878 1879 builder.generateDefaultCaseNode(DefaultSt); 1880 } 1881 1882 //===----------------------------------------------------------------------===// 1883 // Transfer functions: Loads and stores. 1884 //===----------------------------------------------------------------------===// 1885 1886 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1887 ExplodedNode *Pred, 1888 ExplodedNodeSet &Dst) { 1889 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1890 1891 ProgramStateRef state = Pred->getState(); 1892 const LocationContext *LCtx = Pred->getLocationContext(); 1893 1894 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1895 // C permits "extern void v", and if you cast the address to a valid type, 1896 // you can even do things with it. We simply pretend 1897 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1898 const LocationContext *LocCtxt = Pred->getLocationContext(); 1899 const Decl *D = LocCtxt->getDecl(); 1900 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1901 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1902 SVal V; 1903 bool IsReference; 1904 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1905 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1906 MD->getParent()->isLambda()) { 1907 // Lookup the field of the lambda. 1908 const CXXRecordDecl *CXXRec = MD->getParent(); 1909 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1910 FieldDecl *LambdaThisCaptureField; 1911 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1912 const FieldDecl *FD = LambdaCaptureFields[VD]; 1913 if (!FD) { 1914 // When a constant is captured, sometimes no corresponding field is 1915 // created in the lambda object. 
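// For instance (illustrative), in
//   const int n = 4;
//   auto f = [=] { return n; };
// 'n' need not be odr-used, so the closure type may have no field for it; in
// that case we fall back to the captured variable's own lvalue below.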
1916 assert(VD->getType().isConstQualified()); 1917 V = state->getLValue(VD, LocCtxt); 1918 IsReference = false; 1919 } else { 1920 Loc CXXThis = 1921 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1922 SVal CXXThisVal = state->getSVal(CXXThis); 1923 V = state->getLValue(FD, CXXThisVal); 1924 IsReference = FD->getType()->isReferenceType(); 1925 } 1926 } else { 1927 V = state->getLValue(VD, LocCtxt); 1928 IsReference = VD->getType()->isReferenceType(); 1929 } 1930 1931 // For references, the 'lvalue' is the pointer address stored in the 1932 // reference region. 1933 if (IsReference) { 1934 if (const MemRegion *R = V.getAsRegion()) 1935 V = state->getSVal(R); 1936 else 1937 V = UnknownVal(); 1938 } 1939 1940 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1941 ProgramPoint::PostLValueKind); 1942 return; 1943 } 1944 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 1945 assert(!Ex->isGLValue()); 1946 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 1947 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 1948 return; 1949 } 1950 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 1951 SVal V = svalBuilder.getFunctionPointer(FD); 1952 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1953 ProgramPoint::PostLValueKind); 1954 return; 1955 } 1956 if (isa<FieldDecl>(D)) { 1957 // FIXME: Compute lvalue of field pointers-to-member. 1958 // Right now we just use a non-null void pointer, so that it gives proper 1959 // results in boolean contexts. 1960 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 1961 currBldrCtx->blockCount()); 1962 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 1963 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1964 ProgramPoint::PostLValueKind); 1965 return; 1966 } 1967 1968 llvm_unreachable("Support for this Decl not implemented."); 1969 } 1970 1971 /// VisitArraySubscriptExpr - Transfer function for array accesses 1972 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 1973 ExplodedNode *Pred, 1974 ExplodedNodeSet &Dst){ 1975 1976 const Expr *Base = A->getBase()->IgnoreParens(); 1977 const Expr *Idx = A->getIdx()->IgnoreParens(); 1978 1979 ExplodedNodeSet checkerPreStmt; 1980 getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this); 1981 1982 StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx); 1983 assert(A->isGLValue() || 1984 (!AMgr.getLangOpts().CPlusPlus && 1985 A->getType().isCForbiddenLValueType())); 1986 1987 for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(), 1988 ei = checkerPreStmt.end(); it != ei; ++it) { 1989 const LocationContext *LCtx = (*it)->getLocationContext(); 1990 ProgramStateRef state = (*it)->getState(); 1991 SVal V = state->getLValue(A->getType(), 1992 state->getSVal(Idx, LCtx), 1993 state->getSVal(Base, LCtx)); 1994 Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr, 1995 ProgramPoint::PostLValueKind); 1996 } 1997 } 1998 1999 /// VisitMemberExpr - Transfer function for member expressions. 2000 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2001 ExplodedNodeSet &Dst) { 2002 2003 // FIXME: Prechecks eventually go in ::Visit(). 2004 ExplodedNodeSet CheckedSet; 2005 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this); 2006 2007 ExplodedNodeSet EvalSet; 2008 ValueDecl *Member = M->getMemberDecl(); 2009 2010 // Handle static member variables and enum constants accessed via 2011 // member syntax. 
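// For example (illustrative):
//   struct S { static int x; enum { E = 1 }; };
//   S s;
//   int a = s.x; // member syntax, but the decl is a VarDecl
//   int b = s.E; // member syntax, but the decl is an EnumConstantDecl
// Both behave like DeclRefExprs, so they are forwarded to
// VisitCommonDeclRefExpr below.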
2012 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
2013 
2014 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2015 I != E; ++I) {
2016 VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
2017 }
2018 } else {
2019 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
2020 ExplodedNodeSet Tmp;
2021 
2022 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2023 I != E; ++I) {
2024 ProgramStateRef state = (*I)->getState();
2025 const LocationContext *LCtx = (*I)->getLocationContext();
2026 Expr *BaseExpr = M->getBase();
2027 
2028 // Handle C++ method calls.
2029 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
2030 if (MD->isInstance())
2031 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2032 
2033 SVal MDVal = svalBuilder.getFunctionPointer(MD);
2034 state = state->BindExpr(M, LCtx, MDVal);
2035 
2036 Bldr.generateNode(M, *I, state);
2037 continue;
2038 }
2039 
2040 // Handle regular struct fields / member variables.
2041 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2042 SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
2043 
2044 FieldDecl *field = cast<FieldDecl>(Member);
2045 SVal L = state->getLValue(field, baseExprVal);
2046 
2047 if (M->isGLValue() || M->getType()->isArrayType()) {
2048 // We special-case rvalues of array type because the analyzer cannot
2049 // reason about them, since we expect all regions to be wrapped in Locs.
2050 // We instead treat these as lvalues and assume that they will decay to
2051 // pointers as soon as they are used.
2052 if (!M->isGLValue()) {
2053 assert(M->getType()->isArrayType());
2054 const ImplicitCastExpr *PE =
2055 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
2056 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
2057 llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
2058 }
2059 }
2060 
2061 if (field->getType()->isReferenceType()) {
2062 if (const MemRegion *R = L.getAsRegion())
2063 L = state->getSVal(R);
2064 else
2065 L = UnknownVal();
2066 }
2067 
2068 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
2069 ProgramPoint::PostLValueKind);
2070 } else {
2071 Bldr.takeNodes(*I);
2072 evalLoad(Tmp, M, M, *I, state, L);
2073 Bldr.addNodes(Tmp);
2074 }
2075 }
2076 }
2077 
2078 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
2079 }
2080 
2081 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
2082 ExplodedNodeSet &Dst) {
2083 ExplodedNodeSet AfterPreSet;
2084 getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
2085 
2086 // For now, treat all the arguments to C11 atomics as escaping.
2087 // FIXME: Ideally we should model the behavior of the atomics precisely here.
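// For example (illustrative), for
//   _Atomic(int) a;
//   __c11_atomic_store(&a, 1, __ATOMIC_SEQ_CST);
// the store itself is not modelled; we merely invalidate whatever is
// reachable from the argument values and bind an unknown result to the
// expression below.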
2088 
2089 ExplodedNodeSet AfterInvalidateSet;
2090 StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2091 
2092 for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2093 I != E; ++I) {
2094 ProgramStateRef State = (*I)->getState();
2095 const LocationContext *LCtx = (*I)->getLocationContext();
2096 
2097 SmallVector<SVal, 8> ValuesToInvalidate;
2098 for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2099 const Expr *SubExpr = AE->getSubExprs()[SI];
2100 SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2101 ValuesToInvalidate.push_back(SubExprVal);
2102 }
2103 
2104 State = State->invalidateRegions(ValuesToInvalidate, AE,
2105 currBldrCtx->blockCount(),
2106 LCtx,
2107 /*CausedByPointerEscape*/true,
2108 /*Symbols=*/nullptr);
2109 
2110 SVal ResultVal = UnknownVal();
2111 State = State->BindExpr(AE, LCtx, ResultVal);
2112 Bldr.generateNode(AE, *I, State, nullptr,
2113 ProgramPoint::PostStmtKind);
2114 }
2115 
2116 getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2117 }
2118 
2119 namespace {
2120 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2121 InvalidatedSymbols Symbols;
2122 
2123 public:
2124 CollectReachableSymbolsCallback(ProgramStateRef State) {}
2125 const InvalidatedSymbols &getSymbols() const { return Symbols; }
2126 
2127 bool VisitSymbol(SymbolRef Sym) override {
2128 Symbols.insert(Sym);
2129 return true;
2130 }
2131 };
2132 } // end anonymous namespace
2133 
2134 // A value escapes in three possible cases:
2135 // (1) We are binding to something that is not a memory region.
2136 // (2) We are binding to a MemRegion that does not have stack storage.
2137 // (3) We are binding to a MemRegion with stack storage that the store
2138 // does not understand.
2139 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2140 SVal Loc, SVal Val) {
2141 // Are we storing to something that causes the value to "escape"?
2142 bool escapes = true;
2143 
2144 // TODO: Move to StoreManager.
2145 if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
2146 escapes = !regionLoc->getRegion()->hasStackStorage();
2147 
2148 if (!escapes) {
2149 // To test (3), generate a new state with the binding added. If it is
2150 // the same state, then it escapes (since the store cannot represent
2151 // the binding).
2152 // Do this only if we know that the store is not supposed to generate the
2153 // same state.
2154 SVal StoredVal = State->getSVal(regionLoc->getRegion());
2155 if (StoredVal != Val)
2156 escapes = (State == (State->bindLoc(*regionLoc, Val)));
2157 }
2158 }
2159 
2160 // If the store can represent the binding and we are storing to something
2161 // with stack (local) storage, just return and let the simulated state
2162 // continue as is.
2163 if (!escapes)
2164 return State;
2165 
2166 // Otherwise, find all symbols referenced by 'val' that we are tracking
2167 // and stop tracking them.
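// For example (illustrative):
//   extern int *g;
//   void f() { int *p = (int *)malloc(4); g = p; }
// Binding 'p' to the global 'g' lets the pointed-to symbol escape
// (PSK_EscapeOnBind below), so a checker such as MallocChecker stops
// tracking the allocation and does not report it as leaked.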
2168 CollectReachableSymbolsCallback Scanner = 2169 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2170 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2171 State = getCheckerManager().runCheckersForPointerEscape(State, 2172 EscapedSymbols, 2173 /*CallEvent*/ nullptr, 2174 PSK_EscapeOnBind, 2175 nullptr); 2176 2177 return State; 2178 } 2179 2180 ProgramStateRef 2181 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2182 const InvalidatedSymbols *Invalidated, 2183 ArrayRef<const MemRegion *> ExplicitRegions, 2184 ArrayRef<const MemRegion *> Regions, 2185 const CallEvent *Call, 2186 RegionAndSymbolInvalidationTraits &ITraits) { 2187 2188 if (!Invalidated || Invalidated->empty()) 2189 return State; 2190 2191 if (!Call) 2192 return getCheckerManager().runCheckersForPointerEscape(State, 2193 *Invalidated, 2194 nullptr, 2195 PSK_EscapeOther, 2196 &ITraits); 2197 2198 // If the symbols were invalidated by a call, we want to find out which ones 2199 // were invalidated directly due to being arguments to the call. 2200 InvalidatedSymbols SymbolsDirectlyInvalidated; 2201 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2202 E = ExplicitRegions.end(); I != E; ++I) { 2203 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2204 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2205 } 2206 2207 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2208 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2209 E = Invalidated->end(); I!=E; ++I) { 2210 SymbolRef sym = *I; 2211 if (SymbolsDirectlyInvalidated.count(sym)) 2212 continue; 2213 SymbolsIndirectlyInvalidated.insert(sym); 2214 } 2215 2216 if (!SymbolsDirectlyInvalidated.empty()) 2217 State = getCheckerManager().runCheckersForPointerEscape(State, 2218 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2219 2220 // Notify about the symbols that get indirectly invalidated by the call. 2221 if (!SymbolsIndirectlyInvalidated.empty()) 2222 State = getCheckerManager().runCheckersForPointerEscape(State, 2223 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2224 2225 return State; 2226 } 2227 2228 /// evalBind - Handle the semantics of binding a value to a specific location. 2229 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2230 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2231 ExplodedNode *Pred, 2232 SVal location, SVal Val, 2233 bool atDeclInit, const ProgramPoint *PP) { 2234 2235 const LocationContext *LC = Pred->getLocationContext(); 2236 PostStmt PS(StoreE, LC); 2237 if (!PP) 2238 PP = &PS; 2239 2240 // Do a previsit of the bind. 2241 ExplodedNodeSet CheckedSet; 2242 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2243 StoreE, *this, *PP); 2244 2245 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2246 2247 // If the location is not a 'Loc', it will already be handled by 2248 // the checkers. There is nothing left to do. 
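// (This happens, for example, when the target address could not be modelled
// and 'location' is UnknownVal; we still record the potential escape of the
// bound value and emit a PostStore node here.)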
2249 if (!location.getAs<Loc>()) {
2250 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
2251 /*tag*/nullptr);
2252 ProgramStateRef state = Pred->getState();
2253 state = processPointerEscapedOnBind(state, location, Val);
2254 Bldr.generateNode(L, state, Pred);
2255 return;
2256 }
2257 
2258 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2259 I!=E; ++I) {
2260 ExplodedNode *PredI = *I;
2261 ProgramStateRef state = PredI->getState();
2262 
2263 state = processPointerEscapedOnBind(state, location, Val);
2264 
2265 // When binding the value, pass on the hint that this is an initialization.
2266 // For initializations, we do not need to inform clients of region
2267 // changes.
2268 state = state->bindLoc(location.castAs<Loc>(),
2269 Val, /* notifyChanges = */ !atDeclInit);
2270 
2271 const MemRegion *LocReg = nullptr;
2272 if (Optional<loc::MemRegionVal> LocRegVal =
2273 location.getAs<loc::MemRegionVal>()) {
2274 LocReg = LocRegVal->getRegion();
2275 }
2276 
2277 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
2278 Bldr.generateNode(L, state, PredI);
2279 }
2280 }
2281 
2282 /// evalStore - Handle the semantics of a store via an assignment.
2283 /// @param Dst The node set to store generated state nodes.
2284 /// @param AssignE The assignment expression if the store happens in an
2285 /// assignment.
2286 /// @param LocationE The location expression that is stored to.
2287 /// @param state The current simulation state.
2288 /// @param location The location to store the value.
2289 /// @param Val The value to be stored.
2290 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
2291 const Expr *LocationE,
2292 ExplodedNode *Pred,
2293 ProgramStateRef state, SVal location, SVal Val,
2294 const ProgramPointTag *tag) {
2295 // Proceed with the store. We use AssignE as the anchor for the PostStore
2296 // ProgramPoint if it is non-NULL, and LocationE otherwise.
2297 const Expr *StoreE = AssignE ? AssignE : LocationE;
2298 
2299 // Evaluate the location (checks for bad dereferences).
2300 ExplodedNodeSet Tmp;
2301 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);
2302 
2303 if (Tmp.empty())
2304 return;
2305 
2306 if (location.isUndef())
2307 return;
2308 
2309 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
2310 evalBind(Dst, StoreE, *NI, location, Val, false);
2311 }
2312 
2313 void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
2314 const Expr *NodeEx,
2315 const Expr *BoundEx,
2316 ExplodedNode *Pred,
2317 ProgramStateRef state,
2318 SVal location,
2319 const ProgramPointTag *tag,
2320 QualType LoadTy)
2321 {
2322 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");
2323 
2324 // Are we loading from a reference-typed region? If so, the load actually
2325 // results in two loads: one to fetch the address of the referenced value
2326 // and one to fetch the referenced value itself.
2327 if (const TypedValueRegion *TR =
2328 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {
2329 
2330 QualType ValTy = TR->getValueType();
2331 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
2332 static SimpleProgramPointTag
2333 loadReferenceTag(TagProviderName, "Load Reference");
2334 ExplodedNodeSet Tmp;
2335 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
2336 location, &loadReferenceTag,
2337 getContext().getPointerType(RT->getPointeeType()));
2338 
2339 // Perform the load from the referenced value.
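// For example (illustrative):
//   int x = 1; int &r = x; int y = r;
// reading 'r' first loads the address of 'x' stored in r's region (the
// "Load Reference" load above) and then loads the value of 'x' itself below.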
2340 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2341 state = (*I)->getState(); 2342 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2343 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2344 } 2345 return; 2346 } 2347 } 2348 2349 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2350 } 2351 2352 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2353 const Expr *NodeEx, 2354 const Expr *BoundEx, 2355 ExplodedNode *Pred, 2356 ProgramStateRef state, 2357 SVal location, 2358 const ProgramPointTag *tag, 2359 QualType LoadTy) { 2360 assert(NodeEx); 2361 assert(BoundEx); 2362 // Evaluate the location (checks for bad dereferences). 2363 ExplodedNodeSet Tmp; 2364 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2365 if (Tmp.empty()) 2366 return; 2367 2368 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2369 if (location.isUndef()) 2370 return; 2371 2372 // Proceed with the load. 2373 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2374 state = (*NI)->getState(); 2375 const LocationContext *LCtx = (*NI)->getLocationContext(); 2376 2377 SVal V = UnknownVal(); 2378 if (location.isValid()) { 2379 if (LoadTy.isNull()) 2380 LoadTy = BoundEx->getType(); 2381 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2382 } 2383 2384 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2385 ProgramPoint::PostLoadKind); 2386 } 2387 } 2388 2389 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2390 const Stmt *NodeEx, 2391 const Stmt *BoundEx, 2392 ExplodedNode *Pred, 2393 ProgramStateRef state, 2394 SVal location, 2395 const ProgramPointTag *tag, 2396 bool isLoad) { 2397 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2398 // Early checks for performance reason. 2399 if (location.isUnknown()) { 2400 return; 2401 } 2402 2403 ExplodedNodeSet Src; 2404 BldrTop.takeNodes(Pred); 2405 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2406 if (Pred->getState() != state) { 2407 // Associate this new state with an ExplodedNode. 2408 // FIXME: If I pass null tag, the graph is incorrect, e.g for 2409 // int *p; 2410 // p = 0; 2411 // *p = 0xDEADBEEF; 2412 // "p = 0" is not noted as "Null pointer value stored to 'p'" but 2413 // instead "int *p" is noted as 2414 // "Variable 'p' initialized to a null pointer value" 2415 2416 static SimpleProgramPointTag tag(TagProviderName, "Location"); 2417 Bldr.generateNode(NodeEx, Pred, state, &tag); 2418 } 2419 ExplodedNodeSet Tmp; 2420 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad, 2421 NodeEx, BoundEx, *this); 2422 BldrTop.addNodes(Tmp); 2423 } 2424 2425 std::pair<const ProgramPointTag *, const ProgramPointTag*> 2426 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() { 2427 static SimpleProgramPointTag 2428 eagerlyAssumeBinOpBifurcationTrue(TagProviderName, 2429 "Eagerly Assume True"), 2430 eagerlyAssumeBinOpBifurcationFalse(TagProviderName, 2431 "Eagerly Assume False"); 2432 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue, 2433 &eagerlyAssumeBinOpBifurcationFalse); 2434 } 2435 2436 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst, 2437 ExplodedNodeSet &Src, 2438 const Expr *Ex) { 2439 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx); 2440 2441 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) { 2442 ExplodedNode *Pred = *I; 2443 // Test if the previous node was as the same expression. 
This can happen 2444 // when the expression fails to evaluate to anything meaningful and 2445 // (as an optimization) we don't generate a node. 2446 ProgramPoint P = Pred->getLocation(); 2447 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2448 continue; 2449 } 2450 2451 ProgramStateRef state = Pred->getState(); 2452 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2453 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2454 if (SEV && SEV->isExpression()) { 2455 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2456 geteagerlyAssumeBinOpBifurcationTags(); 2457 2458 ProgramStateRef StateTrue, StateFalse; 2459 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2460 2461 // First assume that the condition is true. 2462 if (StateTrue) { 2463 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2464 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2465 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2466 } 2467 2468 // Next, assume that the condition is false. 2469 if (StateFalse) { 2470 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2471 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2472 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2473 } 2474 } 2475 } 2476 } 2477 2478 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2479 ExplodedNodeSet &Dst) { 2480 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2481 // We have processed both the inputs and the outputs. All of the outputs 2482 // should evaluate to Locs. Nuke all of their values. 2483 2484 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2485 // which interprets the inline asm and stores proper results in the 2486 // outputs. 2487 2488 ProgramStateRef state = Pred->getState(); 2489 2490 for (const Expr *O : A->outputs()) { 2491 SVal X = state->getSVal(O, Pred->getLocationContext()); 2492 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2493 2494 if (Optional<Loc> LV = X.getAs<Loc>()) 2495 state = state->bindLoc(*LV, UnknownVal()); 2496 } 2497 2498 Bldr.generateNode(A, Pred, state); 2499 } 2500 2501 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2502 ExplodedNodeSet &Dst) { 2503 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2504 Bldr.generateNode(A, Pred, Pred->getState()); 2505 } 2506 2507 //===----------------------------------------------------------------------===// 2508 // Visualization. 2509 //===----------------------------------------------------------------------===// 2510 2511 #ifndef NDEBUG 2512 static ExprEngine* GraphPrintCheckerState; 2513 static SourceManager* GraphPrintSourceManager; 2514 2515 namespace llvm { 2516 template<> 2517 struct DOTGraphTraits<ExplodedNode*> : 2518 public DefaultDOTGraphTraits { 2519 2520 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2521 2522 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2523 // work. 2524 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2525 return ""; 2526 } 2527 2528 // De-duplicate some source location pretty-printing. 
2529 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2530 if (SLoc.isFileID()) { 2531 Out << "\\lline=" 2532 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2533 << " col=" 2534 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2535 << "\\l"; 2536 } 2537 } 2538 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2539 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2540 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2541 else 2542 SLoc.print(Out, *GraphPrintSourceManager); 2543 } 2544 2545 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2546 2547 std::string sbuf; 2548 llvm::raw_string_ostream Out(sbuf); 2549 2550 // Program Location. 2551 ProgramPoint Loc = N->getLocation(); 2552 2553 switch (Loc.getKind()) { 2554 case ProgramPoint::BlockEntranceKind: { 2555 Out << "Block Entrance: B" 2556 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2557 break; 2558 } 2559 2560 case ProgramPoint::BlockExitKind: 2561 assert (false); 2562 break; 2563 2564 case ProgramPoint::CallEnterKind: 2565 Out << "CallEnter"; 2566 break; 2567 2568 case ProgramPoint::CallExitBeginKind: 2569 Out << "CallExitBegin"; 2570 break; 2571 2572 case ProgramPoint::CallExitEndKind: 2573 Out << "CallExitEnd"; 2574 break; 2575 2576 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2577 Out << "PostStmtPurgeDeadSymbols"; 2578 break; 2579 2580 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2581 Out << "PreStmtPurgeDeadSymbols"; 2582 break; 2583 2584 case ProgramPoint::EpsilonKind: 2585 Out << "Epsilon Point"; 2586 break; 2587 2588 case ProgramPoint::PreImplicitCallKind: { 2589 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2590 Out << "PreCall: "; 2591 2592 // FIXME: Get proper printing options. 2593 PC.getDecl()->print(Out, LangOptions()); 2594 printLocation(Out, PC.getLocation()); 2595 break; 2596 } 2597 2598 case ProgramPoint::PostImplicitCallKind: { 2599 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2600 Out << "PostCall: "; 2601 2602 // FIXME: Get proper printing options. 2603 PC.getDecl()->print(Out, LangOptions()); 2604 printLocation(Out, PC.getLocation()); 2605 break; 2606 } 2607 2608 case ProgramPoint::PostInitializerKind: { 2609 Out << "PostInitializer: "; 2610 const CXXCtorInitializer *Init = 2611 Loc.castAs<PostInitializer>().getInitializer(); 2612 if (const FieldDecl *FD = Init->getAnyMember()) 2613 Out << *FD; 2614 else { 2615 QualType Ty = Init->getTypeSourceInfo()->getType(); 2616 Ty = Ty.getLocalUnqualifiedType(); 2617 LangOptions LO; // FIXME. 2618 Ty.print(Out, LO); 2619 } 2620 break; 2621 } 2622 2623 case ProgramPoint::BlockEdgeKind: { 2624 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2625 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2626 << E.getDst()->getBlockID() << ')'; 2627 2628 if (const Stmt *T = E.getSrc()->getTerminator()) { 2629 SourceLocation SLoc = T->getLocStart(); 2630 2631 Out << "\\|Terminator: "; 2632 LangOptions LO; // FIXME. 2633 E.getSrc()->printTerminator(Out, LO); 2634 2635 if (SLoc.isFileID()) { 2636 Out << "\\lline=" 2637 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2638 << " col=" 2639 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2640 } 2641 2642 if (isa<SwitchStmt>(T)) { 2643 const Stmt *Label = E.getDst()->getLabel(); 2644 2645 if (Label) { 2646 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2647 Out << "\\lcase "; 2648 LangOptions LO; // FIXME. 
2649 if (C->getLHS()) 2650 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2651 2652 if (const Stmt *RHS = C->getRHS()) { 2653 Out << " .. "; 2654 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2655 } 2656 2657 Out << ":"; 2658 } 2659 else { 2660 assert (isa<DefaultStmt>(Label)); 2661 Out << "\\ldefault:"; 2662 } 2663 } 2664 else 2665 Out << "\\l(implicit) default:"; 2666 } 2667 else if (isa<IndirectGotoStmt>(T)) { 2668 // FIXME 2669 } 2670 else { 2671 Out << "\\lCondition: "; 2672 if (*E.getSrc()->succ_begin() == E.getDst()) 2673 Out << "true"; 2674 else 2675 Out << "false"; 2676 } 2677 2678 Out << "\\l"; 2679 } 2680 2681 break; 2682 } 2683 2684 default: { 2685 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2686 assert(S != nullptr && "Expecting non-null Stmt"); 2687 2688 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2689 LangOptions LO; // FIXME. 2690 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2691 printLocation(Out, S->getLocStart()); 2692 2693 if (Loc.getAs<PreStmt>()) 2694 Out << "\\lPreStmt\\l;"; 2695 else if (Loc.getAs<PostLoad>()) 2696 Out << "\\lPostLoad\\l;"; 2697 else if (Loc.getAs<PostStore>()) 2698 Out << "\\lPostStore\\l"; 2699 else if (Loc.getAs<PostLValue>()) 2700 Out << "\\lPostLValue\\l"; 2701 2702 break; 2703 } 2704 } 2705 2706 ProgramStateRef state = N->getState(); 2707 Out << "\\|StateID: " << (const void*) state.get() 2708 << " NodeID: " << (const void*) N << "\\|"; 2709 2710 // Analysis stack backtrace. 2711 Out << "Location context stack (from current to outer):\\l"; 2712 const LocationContext *LC = Loc.getLocationContext(); 2713 unsigned Idx = 0; 2714 for (; LC; LC = LC->getParent(), ++Idx) { 2715 Out << Idx << ". (" << (const void *)LC << ") "; 2716 switch (LC->getKind()) { 2717 case LocationContext::StackFrame: 2718 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2719 Out << "Calling " << D->getQualifiedNameAsString(); 2720 else 2721 Out << "Calling anonymous code"; 2722 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2723 Out << " at "; 2724 printLocation2(Out, S->getLocStart()); 2725 } 2726 break; 2727 case LocationContext::Block: 2728 Out << "Invoking block"; 2729 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2730 Out << " defined at "; 2731 printLocation2(Out, D->getLocStart()); 2732 } 2733 break; 2734 case LocationContext::Scope: 2735 Out << "Entering scope"; 2736 // FIXME: Add more info once ScopeContext is activated. 2737 break; 2738 } 2739 Out << "\\l"; 2740 } 2741 Out << "\\l"; 2742 2743 state->printDOT(Out); 2744 2745 Out << "\\l"; 2746 2747 if (const ProgramPointTag *tag = Loc.getTag()) { 2748 Out << "\\|Tag: " << tag->getTagDescription(); 2749 Out << "\\l"; 2750 } 2751 return Out.str(); 2752 } 2753 }; 2754 } // end llvm namespace 2755 #endif 2756 2757 void ExprEngine::ViewGraph(bool trim) { 2758 #ifndef NDEBUG 2759 if (trim) { 2760 std::vector<const ExplodedNode*> Src; 2761 2762 // Flush any outstanding reports to make sure we cover all the nodes. 2763 // This does not cause them to get displayed. 2764 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2765 const_cast<BugType*>(*I)->FlushReports(BR); 2766 2767 // Iterate through the reports and get their nodes. 
2768 for (BugReporter::EQClasses_iterator 2769 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2770 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2771 if (N) Src.push_back(N); 2772 } 2773 2774 ViewGraph(Src); 2775 } 2776 else { 2777 GraphPrintCheckerState = this; 2778 GraphPrintSourceManager = &getContext().getSourceManager(); 2779 2780 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2781 2782 GraphPrintCheckerState = nullptr; 2783 GraphPrintSourceManager = nullptr; 2784 } 2785 #endif 2786 } 2787 2788 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2789 #ifndef NDEBUG 2790 GraphPrintCheckerState = this; 2791 GraphPrintSourceManager = &getContext().getSourceManager(); 2792 2793 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2794 2795 if (!TrimmedG.get()) 2796 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2797 else 2798 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2799 2800 GraphPrintCheckerState = nullptr; 2801 GraphPrintSourceManager = nullptr; 2802 #endif 2803 } 2804
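// ViewGraph(true) trims the ExplodedGraph down to the error nodes of the
// reports collected so far before rendering it; ViewGraph(false) renders the
// whole graph starting from its root. Both are no-ops in NDEBUG builds.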