//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions.  Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function.  This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer
  // region, start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// processAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression?  If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
         I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
       E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
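  // In that case only a PostImplicitCall node is generated below, so the path
  // continues without modeling the destructor call itself.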
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
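    // Each of these generates a sink node and records the block as aborted,
    // so the current path is not explored any further.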
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
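    // Only the checker pre-/post-statement callbacks run for these; no new
    // binding is created here.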
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
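    // Binding a fresh conjured symbol keeps later reads of these expressions
    // well-defined even though their semantics are not modeled.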
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                              Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant.  We need to fix the CFG to not
    //        model it as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete.  We basically treat @throw as
      // an abort.
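      // The sink node ends exploration of this path.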
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                              Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF =
      CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance.  (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast.  Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.

  // The invariants are still shifting, but it is possible that the
  // last element in a CFGBlock is not a CFGStmt.  Look for the last
  // CFGStmt as the value of the condition.
  CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  for (; I != E; ++I) {
    CFGElement Elem = *I;
    Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
    if (!CS)
      continue;
    const Stmt *LastStmt = CS->getStmt();
    assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
    return LastStmt;
  }
  llvm_unreachable("could not resolve condition");
}

void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
         "CXXBindTemporaryExprs are handled by processBindTemporary.");
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)"
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }

  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity.  Right now casts of symbolic
          // integers that promote their values are currently not tracked well.
          // If 'Condition' is such an expression, try and recover the
          // underlying value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
1648 if (builder.isFeasible(true)) { 1649 if (StTrue) 1650 builder.generateNode(StTrue, true, PredI); 1651 else 1652 builder.markInfeasible(true); 1653 } 1654 1655 // Process the false branch. 1656 if (builder.isFeasible(false)) { 1657 if (StFalse) 1658 builder.generateNode(StFalse, false, PredI); 1659 else 1660 builder.markInfeasible(false); 1661 } 1662 } 1663 currBldrCtx = nullptr; 1664 } 1665 1666 /// The GDM component containing the set of global variables which have been 1667 /// previously initialized with explicit initializers. 1668 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1669 llvm::ImmutableSet<const VarDecl *>) 1670 1671 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1672 NodeBuilderContext &BuilderCtx, 1673 ExplodedNode *Pred, 1674 clang::ento::ExplodedNodeSet &Dst, 1675 const CFGBlock *DstT, 1676 const CFGBlock *DstF) { 1677 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1678 currBldrCtx = &BuilderCtx; 1679 1680 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1681 ProgramStateRef state = Pred->getState(); 1682 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1683 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1684 1685 if (!initHasRun) { 1686 state = state->add<InitializedGlobalsSet>(VD); 1687 } 1688 1689 builder.generateNode(state, initHasRun, Pred); 1690 builder.markInfeasible(!initHasRun); 1691 1692 currBldrCtx = nullptr; 1693 } 1694 1695 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1696 /// nodes by processing the 'effects' of a computed goto jump. 1697 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1698 1699 ProgramStateRef state = builder.getState(); 1700 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1701 1702 // Three possibilities: 1703 // 1704 // (1) We know the computed label. 1705 // (2) The label is NULL (or some other constant), or Undefined. 1706 // (3) We have no clue about the label. Dispatch to all targets. 1707 // 1708 1709 typedef IndirectGotoNodeBuilder::iterator iterator; 1710 1711 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1712 const LabelDecl *L = LV->getLabel(); 1713 1714 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1715 if (I.getLabel() == L) { 1716 builder.generateNode(I, state); 1717 return; 1718 } 1719 } 1720 1721 llvm_unreachable("No block with label."); 1722 } 1723 1724 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1725 // Dispatch to the first target and mark it as a sink. 1726 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1727 // FIXME: add checker visit. 1728 // UndefBranches.insert(N); 1729 return; 1730 } 1731 1732 // This is really a catch-all. We don't support symbolics yet. 1733 // FIXME: Implement dispatch for symbolic pointers. 
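  //
  // Roughly, for code such as
  //
  //   void *target = labels[i];   // symbolic index, unknown label
  //   goto *target;
  //
  // we conservatively treat every address-taken label as a possible
  // successor and generate a node for each of them below.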
1734 1735 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1736 builder.generateNode(I, state); 1737 } 1738 1739 #if 0 1740 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1741 const StackFrameContext* Frame = Pred.getStackFrame(); 1742 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1743 Pred.getState()->get<InitializedTemporariesSet>(); 1744 return std::find_if(Set.begin(), Set.end(), 1745 [&](const CXXBindTemporaryContext &Ctx) { 1746 if (Ctx.second == Frame) { 1747 Ctx.first->dump(); 1748 llvm::errs() << "\n"; 1749 } 1750 return Ctx.second == Frame; 1751 }) == Set.end(); 1752 } 1753 #endif 1754 1755 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1756 ExplodedNode *Pred, 1757 ExplodedNodeSet &Dst, 1758 const BlockEdge &L) { 1759 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1760 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1761 } 1762 1763 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1764 /// nodes when the control reaches the end of a function. 1765 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1766 ExplodedNode *Pred, 1767 const ReturnStmt *RS) { 1768 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1769 // We currently cannot enable this assert, as lifetime extended temporaries 1770 // are not modelled correctly. 1771 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1772 StateMgr.EndPath(Pred->getState()); 1773 1774 ExplodedNodeSet Dst; 1775 if (Pred->getLocationContext()->inTopFrame()) { 1776 // Remove dead symbols. 1777 ExplodedNodeSet AfterRemovedDead; 1778 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1779 1780 // Notify checkers. 1781 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1782 E = AfterRemovedDead.end(); I != E; ++I) { 1783 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1784 } 1785 } else { 1786 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1787 } 1788 1789 Engine.enqueueEndOfFunction(Dst, RS); 1790 } 1791 1792 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1793 /// nodes by processing the 'effects' of a switch statement. 1794 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1795 typedef SwitchNodeBuilder::iterator iterator; 1796 ProgramStateRef state = builder.getState(); 1797 const Expr *CondE = builder.getCondition(); 1798 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1799 1800 if (CondV_untested.isUndef()) { 1801 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1802 // FIXME: add checker 1803 //UndefBranches.insert(N); 1804 1805 return; 1806 } 1807 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1808 1809 ProgramStateRef DefaultSt = state; 1810 1811 iterator I = builder.begin(), EI = builder.end(); 1812 bool defaultIsFeasible = I == EI; 1813 1814 for ( ; I != EI; ++I) { 1815 // Successor may be pruned out during CFG construction. 1816 if (!I.getBlock()) 1817 continue; 1818 1819 const CaseStmt *Case = I.getCase(); 1820 1821 // Evaluate the LHS of the case value. 1822 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1823 assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType())); 1824 1825 // Get the RHS of the case, if it exists. 
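    // (Only GNU case ranges such as 'case 1 ... 5:' carry an RHS; for a
    // plain 'case 3:' the inclusive range below collapses to the single
    // value V1.)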
1826 llvm::APSInt V2; 1827 if (const Expr *E = Case->getRHS()) 1828 V2 = E->EvaluateKnownConstInt(getContext()); 1829 else 1830 V2 = V1; 1831 1832 ProgramStateRef StateCase; 1833 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1834 std::tie(StateCase, DefaultSt) = 1835 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1836 else // UnknownVal 1837 StateCase = DefaultSt; 1838 1839 if (StateCase) 1840 builder.generateCaseStmtNode(I, StateCase); 1841 1842 // Now "assume" that the case doesn't match. Add this state 1843 // to the default state (if it is feasible). 1844 if (DefaultSt) 1845 defaultIsFeasible = true; 1846 else { 1847 defaultIsFeasible = false; 1848 break; 1849 } 1850 } 1851 1852 if (!defaultIsFeasible) 1853 return; 1854 1855 // If we have switch(enum value), the default branch is not 1856 // feasible if all of the enum constants not covered by 'case:' statements 1857 // are not feasible values for the switch condition. 1858 // 1859 // Note that this isn't as accurate as it could be. Even if there isn't 1860 // a case for a particular enum value as long as that enum value isn't 1861 // feasible then it shouldn't be considered for making 'default:' reachable. 1862 const SwitchStmt *SS = builder.getSwitch(); 1863 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1864 if (CondExpr->getType()->getAs<EnumType>()) { 1865 if (SS->isAllEnumCasesCovered()) 1866 return; 1867 } 1868 1869 builder.generateDefaultCaseNode(DefaultSt); 1870 } 1871 1872 //===----------------------------------------------------------------------===// 1873 // Transfer functions: Loads and stores. 1874 //===----------------------------------------------------------------------===// 1875 1876 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1877 ExplodedNode *Pred, 1878 ExplodedNodeSet &Dst) { 1879 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1880 1881 ProgramStateRef state = Pred->getState(); 1882 const LocationContext *LCtx = Pred->getLocationContext(); 1883 1884 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1885 // C permits "extern void v", and if you cast the address to a valid type, 1886 // you can even do things with it. We simply pretend 1887 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1888 const LocationContext *LocCtxt = Pred->getLocationContext(); 1889 const Decl *D = LocCtxt->getDecl(); 1890 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1891 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1892 SVal V; 1893 bool IsReference; 1894 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1895 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1896 MD->getParent()->isLambda()) { 1897 // Lookup the field of the lambda. 1898 const CXXRecordDecl *CXXRec = MD->getParent(); 1899 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1900 FieldDecl *LambdaThisCaptureField; 1901 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1902 const FieldDecl *FD = LambdaCaptureFields[VD]; 1903 if (!FD) { 1904 // When a constant is captured, sometimes no corresponding field is 1905 // created in the lambda object. 
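        // (Illustration: given 'const int n = 4;', a lambda body may name
        // 'n' as a constant expression without odr-using it, so the closure
        // type gets no field for it; we then fall back to the variable in
        // the enclosing frame below.)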
1906 assert(VD->getType().isConstQualified()); 1907 V = state->getLValue(VD, LocCtxt); 1908 IsReference = false; 1909 } else { 1910 Loc CXXThis = 1911 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1912 SVal CXXThisVal = state->getSVal(CXXThis); 1913 V = state->getLValue(FD, CXXThisVal); 1914 IsReference = FD->getType()->isReferenceType(); 1915 } 1916 } else { 1917 V = state->getLValue(VD, LocCtxt); 1918 IsReference = VD->getType()->isReferenceType(); 1919 } 1920 1921 // For references, the 'lvalue' is the pointer address stored in the 1922 // reference region. 1923 if (IsReference) { 1924 if (const MemRegion *R = V.getAsRegion()) 1925 V = state->getSVal(R); 1926 else 1927 V = UnknownVal(); 1928 } 1929 1930 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1931 ProgramPoint::PostLValueKind); 1932 return; 1933 } 1934 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 1935 assert(!Ex->isGLValue()); 1936 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 1937 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 1938 return; 1939 } 1940 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 1941 SVal V = svalBuilder.getFunctionPointer(FD); 1942 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1943 ProgramPoint::PostLValueKind); 1944 return; 1945 } 1946 if (isa<FieldDecl>(D)) { 1947 // FIXME: Compute lvalue of field pointers-to-member. 1948 // Right now we just use a non-null void pointer, so that it gives proper 1949 // results in boolean contexts. 1950 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 1951 currBldrCtx->blockCount()); 1952 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 1953 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1954 ProgramPoint::PostLValueKind); 1955 return; 1956 } 1957 1958 llvm_unreachable("Support for this Decl not implemented."); 1959 } 1960 1961 /// VisitArraySubscriptExpr - Transfer function for array accesses 1962 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 1963 ExplodedNode *Pred, 1964 ExplodedNodeSet &Dst){ 1965 1966 const Expr *Base = A->getBase()->IgnoreParens(); 1967 const Expr *Idx = A->getIdx()->IgnoreParens(); 1968 1969 ExplodedNodeSet CheckerPreStmt; 1970 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 1971 1972 ExplodedNodeSet EvalSet; 1973 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 1974 assert(A->isGLValue() || 1975 (!AMgr.getLangOpts().CPlusPlus && 1976 A->getType().isCForbiddenLValueType())); 1977 1978 for (auto *Node : CheckerPreStmt) { 1979 const LocationContext *LCtx = Node->getLocationContext(); 1980 ProgramStateRef state = Node->getState(); 1981 SVal V = state->getLValue(A->getType(), 1982 state->getSVal(Idx, LCtx), 1983 state->getSVal(Base, LCtx)); 1984 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 1985 ProgramPoint::PostLValueKind); 1986 } 1987 1988 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 1989 } 1990 1991 /// VisitMemberExpr - Transfer function for member expressions. 1992 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 1993 ExplodedNodeSet &Dst) { 1994 1995 // FIXME: Prechecks eventually go in ::Visit(). 
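  // Rough overview: accesses such as 'obj.staticMember' or 'obj.EnumConstant'
  // behave like ordinary declaration references and are routed to
  // VisitCommonDeclRefExpr below; instance fields and member functions are
  // handled by computing an lvalue (or function pointer) from the base
  // expression.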
1996   ExplodedNodeSet CheckedSet;
1997   getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
1998 
1999   ExplodedNodeSet EvalSet;
2000   ValueDecl *Member = M->getMemberDecl();
2001 
2002   // Handle static member variables and enum constants accessed via
2003   // member syntax.
2004   if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
2006     for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2007          I != E; ++I) {
2008       VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
2009     }
2010   } else {
2011     StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
2012     ExplodedNodeSet Tmp;
2013 
2014     for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2015          I != E; ++I) {
2016       ProgramStateRef state = (*I)->getState();
2017       const LocationContext *LCtx = (*I)->getLocationContext();
2018       Expr *BaseExpr = M->getBase();
2019 
2020       // Handle C++ method calls.
2021       if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
2022         if (MD->isInstance())
2023           state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2024 
2025         SVal MDVal = svalBuilder.getFunctionPointer(MD);
2026         state = state->BindExpr(M, LCtx, MDVal);
2027 
2028         Bldr.generateNode(M, *I, state);
2029         continue;
2030       }
2031 
2032       // Handle regular struct fields / member variables.
2033       state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2034       SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
2035 
2036       FieldDecl *field = cast<FieldDecl>(Member);
2037       SVal L = state->getLValue(field, baseExprVal);
2038 
2039       if (M->isGLValue() || M->getType()->isArrayType()) {
2040         // We special-case rvalues of array type because the analyzer cannot
2041         // reason about them, since we expect all regions to be wrapped in Locs.
2042         // We instead treat these as lvalues and assume that they will decay to
2043         // pointers as soon as they are used.
2044         if (!M->isGLValue()) {
2045           assert(M->getType()->isArrayType());
2046           const ImplicitCastExpr *PE =
2047             dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M));
2048           if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
2049             llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
2050           }
2051         }
2052 
2053         if (field->getType()->isReferenceType()) {
2054           if (const MemRegion *R = L.getAsRegion())
2055             L = state->getSVal(R);
2056           else
2057             L = UnknownVal();
2058         }
2059 
2060         Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
2061                           ProgramPoint::PostLValueKind);
2062       } else {
2063         Bldr.takeNodes(*I);
2064         evalLoad(Tmp, M, M, *I, state, L);
2065         Bldr.addNodes(Tmp);
2066       }
2067     }
2068   }
2069 
2070   getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
2071 }
2072 
2073 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
2074                                  ExplodedNodeSet &Dst) {
2075   ExplodedNodeSet AfterPreSet;
2076   getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
2077 
2078   // For now, treat all the arguments to C11 atomics as escaping.
2079   // FIXME: Ideally we should model the behavior of the atomics precisely here.
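  //
  // E.g. for
  //
  //   _Atomic(int) *p = ...;
  //   __c11_atomic_store(p, 42, __ATOMIC_SEQ_CST);
  //
  // the values of all subexpressions (including whatever 'p' points to) are
  // invalidated below, and the whole expression is left bound to Unknown.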
2080 
2081   ExplodedNodeSet AfterInvalidateSet;
2082   StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2083 
2084   for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2085        I != E; ++I) {
2086     ProgramStateRef State = (*I)->getState();
2087     const LocationContext *LCtx = (*I)->getLocationContext();
2088 
2089     SmallVector<SVal, 8> ValuesToInvalidate;
2090     for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2091       const Expr *SubExpr = AE->getSubExprs()[SI];
2092       SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2093       ValuesToInvalidate.push_back(SubExprVal);
2094     }
2095 
2096     State = State->invalidateRegions(ValuesToInvalidate, AE,
2097                                      currBldrCtx->blockCount(),
2098                                      LCtx,
2099                                      /*CausedByPointerEscape*/true,
2100                                      /*Symbols=*/nullptr);
2101 
2102     SVal ResultVal = UnknownVal();
2103     State = State->BindExpr(AE, LCtx, ResultVal);
2104     Bldr.generateNode(AE, *I, State, nullptr,
2105                       ProgramPoint::PostStmtKind);
2106   }
2107 
2108   getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2109 }
2110 
2111 namespace {
2112 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2113   InvalidatedSymbols Symbols;
2114 
2115 public:
2116   CollectReachableSymbolsCallback(ProgramStateRef State) {}
2117   const InvalidatedSymbols &getSymbols() const { return Symbols; }
2118 
2119   bool VisitSymbol(SymbolRef Sym) override {
2120     Symbols.insert(Sym);
2121     return true;
2122   }
2123 };
2124 } // end anonymous namespace
2125 
2126 // A value escapes in three possible cases:
2127 // (1) We are binding to something that is not a memory region.
2128 // (2) We are binding to a MemRegion that does not have stack storage.
2129 // (3) We are binding to a MemRegion with stack storage that the store
2130 //     does not understand.
2131 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2132                                                         SVal Loc, SVal Val) {
2133   // Are we storing to something that causes the value to "escape"?
2134   bool escapes = true;
2135 
2136   // TODO: Move to StoreManager.
2137   if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
2138     escapes = !regionLoc->getRegion()->hasStackStorage();
2139 
2140     if (!escapes) {
2141       // To test (3), generate a new state with the binding added. If it is
2142       // the same state, then it escapes (since the store cannot represent
2143       // the binding).
2144       // Do this only if we know that the store is not supposed to generate the
2145       // same state.
2146       SVal StoredVal = State->getSVal(regionLoc->getRegion());
2147       if (StoredVal != Val)
2148         escapes = (State == (State->bindLoc(*regionLoc, Val)));
2149     }
2150   }
2151 
2152   // If the store can represent the binding and we are storing to something
2153   // with local (stack) storage, just return and have the simulation state
2154   // continue as is.
2155   if (!escapes)
2156     return State;
2157 
2158   // Otherwise, find all symbols referenced by 'Val' that we are tracking
2159   // and stop tracking them.
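  // (A typical client: once a tracked pointer is written into a global or
  // into an unknown location, checkers are notified via PSK_EscapeOnBind
  // and can, e.g., stop reporting it as a potential leak.)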
2160 CollectReachableSymbolsCallback Scanner = 2161 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2162 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2163 State = getCheckerManager().runCheckersForPointerEscape(State, 2164 EscapedSymbols, 2165 /*CallEvent*/ nullptr, 2166 PSK_EscapeOnBind, 2167 nullptr); 2168 2169 return State; 2170 } 2171 2172 ProgramStateRef 2173 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2174 const InvalidatedSymbols *Invalidated, 2175 ArrayRef<const MemRegion *> ExplicitRegions, 2176 ArrayRef<const MemRegion *> Regions, 2177 const CallEvent *Call, 2178 RegionAndSymbolInvalidationTraits &ITraits) { 2179 2180 if (!Invalidated || Invalidated->empty()) 2181 return State; 2182 2183 if (!Call) 2184 return getCheckerManager().runCheckersForPointerEscape(State, 2185 *Invalidated, 2186 nullptr, 2187 PSK_EscapeOther, 2188 &ITraits); 2189 2190 // If the symbols were invalidated by a call, we want to find out which ones 2191 // were invalidated directly due to being arguments to the call. 2192 InvalidatedSymbols SymbolsDirectlyInvalidated; 2193 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2194 E = ExplicitRegions.end(); I != E; ++I) { 2195 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2196 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2197 } 2198 2199 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2200 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2201 E = Invalidated->end(); I!=E; ++I) { 2202 SymbolRef sym = *I; 2203 if (SymbolsDirectlyInvalidated.count(sym)) 2204 continue; 2205 SymbolsIndirectlyInvalidated.insert(sym); 2206 } 2207 2208 if (!SymbolsDirectlyInvalidated.empty()) 2209 State = getCheckerManager().runCheckersForPointerEscape(State, 2210 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2211 2212 // Notify about the symbols that get indirectly invalidated by the call. 2213 if (!SymbolsIndirectlyInvalidated.empty()) 2214 State = getCheckerManager().runCheckersForPointerEscape(State, 2215 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2216 2217 return State; 2218 } 2219 2220 /// evalBind - Handle the semantics of binding a value to a specific location. 2221 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2222 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2223 ExplodedNode *Pred, 2224 SVal location, SVal Val, 2225 bool atDeclInit, const ProgramPoint *PP) { 2226 2227 const LocationContext *LC = Pred->getLocationContext(); 2228 PostStmt PS(StoreE, LC); 2229 if (!PP) 2230 PP = &PS; 2231 2232 // Do a previsit of the bind. 2233 ExplodedNodeSet CheckedSet; 2234 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2235 StoreE, *this, *PP); 2236 2237 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2238 2239 // If the location is not a 'Loc', it will already be handled by 2240 // the checkers. There is nothing left to do. 
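  // (A non-Loc location here usually means the target address could not be
  // modelled, e.g. it is Unknown; the branch below only records the
  // potential escape of 'Val' and emits a PostStore node, without touching
  // the store.)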
2241   if (!location.getAs<Loc>()) {
2242     const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
2243                                      /*tag*/nullptr);
2244     ProgramStateRef state = Pred->getState();
2245     state = processPointerEscapedOnBind(state, location, Val);
2246     Bldr.generateNode(L, state, Pred);
2247     return;
2248   }
2249 
2250   for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2251        I!=E; ++I) {
2252     ExplodedNode *PredI = *I;
2253     ProgramStateRef state = PredI->getState();
2254 
2255     state = processPointerEscapedOnBind(state, location, Val);
2256 
2257     // When binding the value, pass on the hint that this is an initialization.
2258     // For initializations, we do not need to inform clients of region
2259     // changes.
2260     state = state->bindLoc(location.castAs<Loc>(),
2261                            Val, /* notifyChanges = */ !atDeclInit);
2262 
2263     const MemRegion *LocReg = nullptr;
2264     if (Optional<loc::MemRegionVal> LocRegVal =
2265           location.getAs<loc::MemRegionVal>()) {
2266       LocReg = LocRegVal->getRegion();
2267     }
2268 
2269     const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
2270     Bldr.generateNode(L, state, PredI);
2271   }
2272 }
2273 
2274 /// evalStore - Handle the semantics of a store via an assignment.
2275 /// @param Dst The node set to store generated state nodes.
2276 /// @param AssignE The assignment expression if the store happens in an
2277 /// assignment.
2278 /// @param LocationE The location expression that is stored to.
2279 /// @param state The current simulation state.
2280 /// @param location The location to store the value.
2281 /// @param Val The value to be stored.
2282 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
2283                            const Expr *LocationE,
2284                            ExplodedNode *Pred,
2285                            ProgramStateRef state, SVal location, SVal Val,
2286                            const ProgramPointTag *tag) {
2287   // Proceed with the store. We use AssignE as the anchor for the PostStore
2288   // ProgramPoint if it is non-NULL, and LocationE otherwise.
2289   const Expr *StoreE = AssignE ? AssignE : LocationE;
2290 
2291   // Evaluate the location (checks for bad dereferences).
2292   ExplodedNodeSet Tmp;
2293   evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);
2294 
2295   if (Tmp.empty())
2296     return;
2297 
2298   if (location.isUndef())
2299     return;
2300 
2301   for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI)
2302     evalBind(Dst, StoreE, *NI, location, Val, false);
2303 }
2304 
2305 void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
2306                           const Expr *NodeEx,
2307                           const Expr *BoundEx,
2308                           ExplodedNode *Pred,
2309                           ProgramStateRef state,
2310                           SVal location,
2311                           const ProgramPointTag *tag,
2312                           QualType LoadTy)
2313 {
2314   assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");
2315 
2316   // Are we loading from a region? This actually results in two loads; one
2317   // to fetch the address of the referenced value and one to fetch the
2318   // referenced value.
2319   if (const TypedValueRegion *TR =
2320         dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {
2321 
2322     QualType ValTy = TR->getValueType();
2323     if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
2324       static SimpleProgramPointTag
2325              loadReferenceTag(TagProviderName, "Load Reference");
2326       ExplodedNodeSet Tmp;
2327       evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
2328                      location, &loadReferenceTag,
2329                      getContext().getPointerType(RT->getPointeeType()));
2330 
2331       // Perform the load from the referenced value.
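      // (E.g. for 'int &r = x; int y = r;' the load above fetched the
      // address held in the reference region for 'r'; the loop below then
      // loads the referenced 'int' itself.)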
2332       for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) {
2333         state = (*I)->getState();
2334         location = state->getSVal(BoundEx, (*I)->getLocationContext());
2335         evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
2336       }
2337       return;
2338     }
2339   }
2340 
2341   evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
2342 }
2343 
2344 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
2345                                 const Expr *NodeEx,
2346                                 const Expr *BoundEx,
2347                                 ExplodedNode *Pred,
2348                                 ProgramStateRef state,
2349                                 SVal location,
2350                                 const ProgramPointTag *tag,
2351                                 QualType LoadTy) {
2352   assert(NodeEx);
2353   assert(BoundEx);
2354   // Evaluate the location (checks for bad dereferences).
2355   ExplodedNodeSet Tmp;
2356   evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
2357   if (Tmp.empty())
2358     return;
2359 
2360   StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
2361   if (location.isUndef())
2362     return;
2363 
2364   // Proceed with the load.
2365   for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
2366     state = (*NI)->getState();
2367     const LocationContext *LCtx = (*NI)->getLocationContext();
2368 
2369     SVal V = UnknownVal();
2370     if (location.isValid()) {
2371       if (LoadTy.isNull())
2372         LoadTy = BoundEx->getType();
2373       V = state->getSVal(location.castAs<Loc>(), LoadTy);
2374     }
2375 
2376     Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
2377                       ProgramPoint::PostLoadKind);
2378   }
2379 }
2380 
2381 void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
2382                               const Stmt *NodeEx,
2383                               const Stmt *BoundEx,
2384                               ExplodedNode *Pred,
2385                               ProgramStateRef state,
2386                               SVal location,
2387                               const ProgramPointTag *tag,
2388                               bool isLoad) {
2389   StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
2390   // Early checks for performance reasons.
2391   if (location.isUnknown()) {
2392     return;
2393   }
2394 
2395   ExplodedNodeSet Src;
2396   BldrTop.takeNodes(Pred);
2397   StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
2398   if (Pred->getState() != state) {
2399     // Associate this new state with an ExplodedNode.
2400     // FIXME: If I pass null tag, the graph is incorrect, e.g. for
2401     //   int *p;
2402     //   p = 0;
2403     //   *p = 0xDEADBEEF;
2404     // "p = 0" is not noted as "Null pointer value stored to 'p'" but
2405     // instead "int *p" is noted as
2406     // "Variable 'p' initialized to a null pointer value"
2407 
2408     static SimpleProgramPointTag tag(TagProviderName, "Location");
2409     Bldr.generateNode(NodeEx, Pred, state, &tag);
2410   }
2411   ExplodedNodeSet Tmp;
2412   getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
2413                                              NodeEx, BoundEx, *this);
2414   BldrTop.addNodes(Tmp);
2415 }
2416 
2417 std::pair<const ProgramPointTag *, const ProgramPointTag*>
2418 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
2419   static SimpleProgramPointTag
2420          eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
2421                                            "Eagerly Assume True"),
2422          eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
2423                                             "Eagerly Assume False");
2424   return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
2425                         &eagerlyAssumeBinOpBifurcationFalse);
2426 }
2427 
2428 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
2429                                                    ExplodedNodeSet &Src,
2430                                                    const Expr *Ex) {
2431   StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
2432 
2433   for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
2434     ExplodedNode *Pred = *I;
2435     // Test if the previous node was the same expression.
This can happen 2436 // when the expression fails to evaluate to anything meaningful and 2437 // (as an optimization) we don't generate a node. 2438 ProgramPoint P = Pred->getLocation(); 2439 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2440 continue; 2441 } 2442 2443 ProgramStateRef state = Pred->getState(); 2444 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2445 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2446 if (SEV && SEV->isExpression()) { 2447 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2448 geteagerlyAssumeBinOpBifurcationTags(); 2449 2450 ProgramStateRef StateTrue, StateFalse; 2451 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2452 2453 // First assume that the condition is true. 2454 if (StateTrue) { 2455 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2456 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2457 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2458 } 2459 2460 // Next, assume that the condition is false. 2461 if (StateFalse) { 2462 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2463 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2464 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2465 } 2466 } 2467 } 2468 } 2469 2470 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2471 ExplodedNodeSet &Dst) { 2472 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2473 // We have processed both the inputs and the outputs. All of the outputs 2474 // should evaluate to Locs. Nuke all of their values. 2475 2476 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2477 // which interprets the inline asm and stores proper results in the 2478 // outputs. 2479 2480 ProgramStateRef state = Pred->getState(); 2481 2482 for (const Expr *O : A->outputs()) { 2483 SVal X = state->getSVal(O, Pred->getLocationContext()); 2484 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2485 2486 if (Optional<Loc> LV = X.getAs<Loc>()) 2487 state = state->bindLoc(*LV, UnknownVal()); 2488 } 2489 2490 Bldr.generateNode(A, Pred, state); 2491 } 2492 2493 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2494 ExplodedNodeSet &Dst) { 2495 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2496 Bldr.generateNode(A, Pred, Pred->getState()); 2497 } 2498 2499 //===----------------------------------------------------------------------===// 2500 // Visualization. 2501 //===----------------------------------------------------------------------===// 2502 2503 #ifndef NDEBUG 2504 static ExprEngine* GraphPrintCheckerState; 2505 static SourceManager* GraphPrintSourceManager; 2506 2507 namespace llvm { 2508 template<> 2509 struct DOTGraphTraits<ExplodedNode*> : 2510 public DefaultDOTGraphTraits { 2511 2512 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2513 2514 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2515 // work. 2516 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2517 return ""; 2518 } 2519 2520 // De-duplicate some source location pretty-printing. 
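  // (printLocation emits something like "\lline=42 col=7\l" for file
  // locations; printLocation2 prints a compact "line 42" for locations in
  // the main file and a full location otherwise.)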
2521 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2522 if (SLoc.isFileID()) { 2523 Out << "\\lline=" 2524 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2525 << " col=" 2526 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2527 << "\\l"; 2528 } 2529 } 2530 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2531 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2532 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2533 else 2534 SLoc.print(Out, *GraphPrintSourceManager); 2535 } 2536 2537 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2538 2539 std::string sbuf; 2540 llvm::raw_string_ostream Out(sbuf); 2541 2542 // Program Location. 2543 ProgramPoint Loc = N->getLocation(); 2544 2545 switch (Loc.getKind()) { 2546 case ProgramPoint::BlockEntranceKind: { 2547 Out << "Block Entrance: B" 2548 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2549 break; 2550 } 2551 2552 case ProgramPoint::BlockExitKind: 2553 assert (false); 2554 break; 2555 2556 case ProgramPoint::CallEnterKind: 2557 Out << "CallEnter"; 2558 break; 2559 2560 case ProgramPoint::CallExitBeginKind: 2561 Out << "CallExitBegin"; 2562 break; 2563 2564 case ProgramPoint::CallExitEndKind: 2565 Out << "CallExitEnd"; 2566 break; 2567 2568 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2569 Out << "PostStmtPurgeDeadSymbols"; 2570 break; 2571 2572 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2573 Out << "PreStmtPurgeDeadSymbols"; 2574 break; 2575 2576 case ProgramPoint::EpsilonKind: 2577 Out << "Epsilon Point"; 2578 break; 2579 2580 case ProgramPoint::PreImplicitCallKind: { 2581 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2582 Out << "PreCall: "; 2583 2584 // FIXME: Get proper printing options. 2585 PC.getDecl()->print(Out, LangOptions()); 2586 printLocation(Out, PC.getLocation()); 2587 break; 2588 } 2589 2590 case ProgramPoint::PostImplicitCallKind: { 2591 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2592 Out << "PostCall: "; 2593 2594 // FIXME: Get proper printing options. 2595 PC.getDecl()->print(Out, LangOptions()); 2596 printLocation(Out, PC.getLocation()); 2597 break; 2598 } 2599 2600 case ProgramPoint::PostInitializerKind: { 2601 Out << "PostInitializer: "; 2602 const CXXCtorInitializer *Init = 2603 Loc.castAs<PostInitializer>().getInitializer(); 2604 if (const FieldDecl *FD = Init->getAnyMember()) 2605 Out << *FD; 2606 else { 2607 QualType Ty = Init->getTypeSourceInfo()->getType(); 2608 Ty = Ty.getLocalUnqualifiedType(); 2609 LangOptions LO; // FIXME. 2610 Ty.print(Out, LO); 2611 } 2612 break; 2613 } 2614 2615 case ProgramPoint::BlockEdgeKind: { 2616 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2617 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2618 << E.getDst()->getBlockID() << ')'; 2619 2620 if (const Stmt *T = E.getSrc()->getTerminator()) { 2621 SourceLocation SLoc = T->getLocStart(); 2622 2623 Out << "\\|Terminator: "; 2624 LangOptions LO; // FIXME. 2625 E.getSrc()->printTerminator(Out, LO); 2626 2627 if (SLoc.isFileID()) { 2628 Out << "\\lline=" 2629 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2630 << " col=" 2631 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2632 } 2633 2634 if (isa<SwitchStmt>(T)) { 2635 const Stmt *Label = E.getDst()->getLabel(); 2636 2637 if (Label) { 2638 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2639 Out << "\\lcase "; 2640 LangOptions LO; // FIXME. 
2641 if (C->getLHS()) 2642 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2643 2644 if (const Stmt *RHS = C->getRHS()) { 2645 Out << " .. "; 2646 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2647 } 2648 2649 Out << ":"; 2650 } 2651 else { 2652 assert (isa<DefaultStmt>(Label)); 2653 Out << "\\ldefault:"; 2654 } 2655 } 2656 else 2657 Out << "\\l(implicit) default:"; 2658 } 2659 else if (isa<IndirectGotoStmt>(T)) { 2660 // FIXME 2661 } 2662 else { 2663 Out << "\\lCondition: "; 2664 if (*E.getSrc()->succ_begin() == E.getDst()) 2665 Out << "true"; 2666 else 2667 Out << "false"; 2668 } 2669 2670 Out << "\\l"; 2671 } 2672 2673 break; 2674 } 2675 2676 default: { 2677 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2678 assert(S != nullptr && "Expecting non-null Stmt"); 2679 2680 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2681 LangOptions LO; // FIXME. 2682 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2683 printLocation(Out, S->getLocStart()); 2684 2685 if (Loc.getAs<PreStmt>()) 2686 Out << "\\lPreStmt\\l;"; 2687 else if (Loc.getAs<PostLoad>()) 2688 Out << "\\lPostLoad\\l;"; 2689 else if (Loc.getAs<PostStore>()) 2690 Out << "\\lPostStore\\l"; 2691 else if (Loc.getAs<PostLValue>()) 2692 Out << "\\lPostLValue\\l"; 2693 2694 break; 2695 } 2696 } 2697 2698 ProgramStateRef state = N->getState(); 2699 Out << "\\|StateID: " << (const void*) state.get() 2700 << " NodeID: " << (const void*) N << "\\|"; 2701 2702 // Analysis stack backtrace. 2703 Out << "Location context stack (from current to outer):\\l"; 2704 const LocationContext *LC = Loc.getLocationContext(); 2705 unsigned Idx = 0; 2706 for (; LC; LC = LC->getParent(), ++Idx) { 2707 Out << Idx << ". (" << (const void *)LC << ") "; 2708 switch (LC->getKind()) { 2709 case LocationContext::StackFrame: 2710 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2711 Out << "Calling " << D->getQualifiedNameAsString(); 2712 else 2713 Out << "Calling anonymous code"; 2714 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2715 Out << " at "; 2716 printLocation2(Out, S->getLocStart()); 2717 } 2718 break; 2719 case LocationContext::Block: 2720 Out << "Invoking block"; 2721 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2722 Out << " defined at "; 2723 printLocation2(Out, D->getLocStart()); 2724 } 2725 break; 2726 case LocationContext::Scope: 2727 Out << "Entering scope"; 2728 // FIXME: Add more info once ScopeContext is activated. 2729 break; 2730 } 2731 Out << "\\l"; 2732 } 2733 Out << "\\l"; 2734 2735 state->printDOT(Out); 2736 2737 Out << "\\l"; 2738 2739 if (const ProgramPointTag *tag = Loc.getTag()) { 2740 Out << "\\|Tag: " << tag->getTagDescription(); 2741 Out << "\\l"; 2742 } 2743 return Out.str(); 2744 } 2745 }; 2746 } // end llvm namespace 2747 #endif 2748 2749 void ExprEngine::ViewGraph(bool trim) { 2750 #ifndef NDEBUG 2751 if (trim) { 2752 std::vector<const ExplodedNode*> Src; 2753 2754 // Flush any outstanding reports to make sure we cover all the nodes. 2755 // This does not cause them to get displayed. 2756 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2757 const_cast<BugType*>(*I)->FlushReports(BR); 2758 2759 // Iterate through the reports and get their nodes. 
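    // Each report equivalence class contributes its error node (if any);
    // those nodes become the roots around which ViewGraph(ArrayRef) below
    // trims the graph.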
2760 for (BugReporter::EQClasses_iterator 2761 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2762 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2763 if (N) Src.push_back(N); 2764 } 2765 2766 ViewGraph(Src); 2767 } 2768 else { 2769 GraphPrintCheckerState = this; 2770 GraphPrintSourceManager = &getContext().getSourceManager(); 2771 2772 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2773 2774 GraphPrintCheckerState = nullptr; 2775 GraphPrintSourceManager = nullptr; 2776 } 2777 #endif 2778 } 2779 2780 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2781 #ifndef NDEBUG 2782 GraphPrintCheckerState = this; 2783 GraphPrintSourceManager = &getContext().getSourceManager(); 2784 2785 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2786 2787 if (!TrimmedG.get()) 2788 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2789 else 2790 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2791 2792 GraphPrintCheckerState = nullptr; 2793 GraphPrintSourceManager = nullptr; 2794 #endif 2795 } 2796
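
// (In practice these dumps are typically reached through the
// 'debug.ViewExplodedGraph' checker or the '-analyzer-viz-egraph-graphviz'
// cc1 option rather than by calling ViewGraph() directly.)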