//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
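      //
      // For example (illustrative only): when a method body such as
      //   void Foo::bar() { if (!this) abort(); }
      // is analyzed as the top-level function, the '!this' branch is treated
      // as infeasible from the start, since we assume the caller supplied a
      // valid object.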
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we don't
    // try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // MaterializeTemporaryExpr may appear out of place, after a few field and
  // base-class accesses have been made to the object, even though semantically
  // it is the whole object that gets materialized and lifetime-extended.
  //
  // For example:
  //
  //   `-MaterializeTemporaryExpr
  //     `-MemberExpr
  //       `-CXXTemporaryObjectExpr
  //
  // instead of the more natural
  //
  //   `-MemberExpr
  //     `-MaterializeTemporaryExpr
  //       `-CXXTemporaryObjectExpr
  //
  // Use the usual methods for obtaining the expression of the base object,
  // and record the adjustments that we need to make to obtain the sub-object
  // that the whole expression 'Ex' refers to. This trick is usual,
  // in the sense that CodeGen takes a similar route.

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;

  const Expr *Init = Ex->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);

  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Init, LC);

  SVal Reg = loc::MemRegionVal(TR);

  // Make the necessary adjustments to obtain the sub-object.
  for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
    const SubobjectAdjustment &Adj = *I;
    switch (Adj.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
      break;
    case SubobjectAdjustment::FieldAdjustment:
      Reg = StoreMgr.getLValueField(Adj.Field, Reg);
      break;
    case SubobjectAdjustment::MemberPointerAdjustment:
      // FIXME: Unimplemented.
      State->bindDefault(Reg, UnknownVal());
      return State;
    }
  }

  // Try to recover some path sensitivity in case we couldn't compute the value.
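  // Conjuring a fresh symbol here (rather than leaving the value unknown)
  // keeps later reads of the temporary region consistent along the path:
  // loads from 'Reg' below will all yield the same symbol.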
  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  // Bind the value of the expression to the sub-object region, and then bind
  // the sub-object region to our expression.
  State = State->bindLoc(Reg, V);
  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
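  // E.g. for 'foo(x + 1)', the subexpression 'x + 1' is consumed by the call
  // expression, so we hold off on purging until the enclosing full expression
  // has been processed.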
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (Init->getType()->isArrayType()) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (varType->isReferenceType()) {
    Region = state->getSVal(Region).getAsRegion()->getBaseRegion();
    varType = cast<TypedValueRegion>(Region)->getValueType();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run the destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
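    // (For example, a CompoundStmt is broken up into its individual
    // statements by the CFG, so the compound statement itself is never
    // handed to ExprEngine.)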
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
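      // (The sub-expression and any temporary destructors of an
      // ExprWithCleanups already appear as separate CFG elements, so there is
      // nothing left to do for the wrapper itself.)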
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ArrayInitLoopExprClass:
    case Stmt::ArrayInitIndexExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                              Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model these as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
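      // That is, the path is simply terminated with a sink node below rather
      // than modeling unwinding to an enclosing @catch/catch handler.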
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                              Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF =
      CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
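  // (The ReplayWithoutInlining entry set on the state below is presumably
  // what the call-evaluation logic later consults in order to skip inlining
  // this call site on the retry.)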
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.
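  //
  // For example, for 'if (a && b)' the block that owns the branch typically
  // re-evaluates only the rightmost leaf 'b'; whether 'a' held was already
  // decided in an earlier block, so 'b' alone determines which successor is
  // taken here.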
1575 1576 // The invariants are still shifting, but it is possible that the 1577 // last element in a CFGBlock is not a CFGStmt. Look for the last 1578 // CFGStmt as the value of the condition. 1579 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1580 for (; I != E; ++I) { 1581 CFGElement Elem = *I; 1582 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1583 if (!CS) 1584 continue; 1585 const Stmt *LastStmt = CS->getStmt(); 1586 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1587 return LastStmt; 1588 } 1589 llvm_unreachable("could not resolve condition"); 1590 } 1591 1592 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1593 NodeBuilderContext& BldCtx, 1594 ExplodedNode *Pred, 1595 ExplodedNodeSet &Dst, 1596 const CFGBlock *DstT, 1597 const CFGBlock *DstF) { 1598 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1599 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1600 const LocationContext *LCtx = Pred->getLocationContext(); 1601 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1602 currBldrCtx = &BldCtx; 1603 1604 // Check for NULL conditions; e.g. "for(;;)" 1605 if (!Condition) { 1606 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1607 NullCondBldr.markInfeasible(false); 1608 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1609 return; 1610 } 1611 1612 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1613 Condition = Ex->IgnoreParens(); 1614 1615 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1616 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1617 Condition->getLocStart(), 1618 "Error evaluating branch"); 1619 1620 ExplodedNodeSet CheckersOutSet; 1621 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1622 Pred, *this); 1623 // We generated only sinks. 1624 if (CheckersOutSet.empty()) 1625 return; 1626 1627 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1628 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1629 E = CheckersOutSet.end(); E != I; ++I) { 1630 ExplodedNode *PredI = *I; 1631 1632 if (PredI->isSink()) 1633 continue; 1634 1635 ProgramStateRef PrevState = PredI->getState(); 1636 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1637 1638 if (X.isUnknownOrUndef()) { 1639 // Give it a chance to recover from unknown. 1640 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1641 if (Ex->getType()->isIntegralOrEnumerationType()) { 1642 // Try to recover some path-sensitivity. Right now casts of symbolic 1643 // integers that promote their values are currently not tracked well. 1644 // If 'Condition' is such an expression, try and recover the 1645 // underlying value and use that instead. 1646 SVal recovered = RecoverCastedSymbol(getStateManager(), 1647 PrevState, Condition, 1648 PredI->getLocationContext(), 1649 getContext()); 1650 1651 if (!recovered.isUnknown()) { 1652 X = recovered; 1653 } 1654 } 1655 } 1656 } 1657 1658 // If the condition is still unknown, give up. 1659 if (X.isUnknownOrUndef()) { 1660 builder.generateNode(PrevState, true, PredI); 1661 builder.generateNode(PrevState, false, PredI); 1662 continue; 1663 } 1664 1665 DefinedSVal V = X.castAs<DefinedSVal>(); 1666 1667 ProgramStateRef StTrue, StFalse; 1668 std::tie(StTrue, StFalse) = PrevState->assume(V); 1669 1670 // Process the true branch. 
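    // assume() returns a null state for a branch that is infeasible under the
    // current constraints.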
1671 if (builder.isFeasible(true)) { 1672 if (StTrue) 1673 builder.generateNode(StTrue, true, PredI); 1674 else 1675 builder.markInfeasible(true); 1676 } 1677 1678 // Process the false branch. 1679 if (builder.isFeasible(false)) { 1680 if (StFalse) 1681 builder.generateNode(StFalse, false, PredI); 1682 else 1683 builder.markInfeasible(false); 1684 } 1685 } 1686 currBldrCtx = nullptr; 1687 } 1688 1689 /// The GDM component containing the set of global variables which have been 1690 /// previously initialized with explicit initializers. 1691 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1692 llvm::ImmutableSet<const VarDecl *>) 1693 1694 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1695 NodeBuilderContext &BuilderCtx, 1696 ExplodedNode *Pred, 1697 clang::ento::ExplodedNodeSet &Dst, 1698 const CFGBlock *DstT, 1699 const CFGBlock *DstF) { 1700 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1701 currBldrCtx = &BuilderCtx; 1702 1703 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1704 ProgramStateRef state = Pred->getState(); 1705 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1706 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1707 1708 if (!initHasRun) { 1709 state = state->add<InitializedGlobalsSet>(VD); 1710 } 1711 1712 builder.generateNode(state, initHasRun, Pred); 1713 builder.markInfeasible(!initHasRun); 1714 1715 currBldrCtx = nullptr; 1716 } 1717 1718 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1719 /// nodes by processing the 'effects' of a computed goto jump. 1720 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1721 1722 ProgramStateRef state = builder.getState(); 1723 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1724 1725 // Three possibilities: 1726 // 1727 // (1) We know the computed label. 1728 // (2) The label is NULL (or some other constant), or Undefined. 1729 // (3) We have no clue about the label. Dispatch to all targets. 1730 // 1731 1732 typedef IndirectGotoNodeBuilder::iterator iterator; 1733 1734 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1735 const LabelDecl *L = LV->getLabel(); 1736 1737 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1738 if (I.getLabel() == L) { 1739 builder.generateNode(I, state); 1740 return; 1741 } 1742 } 1743 1744 llvm_unreachable("No block with label."); 1745 } 1746 1747 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1748 // Dispatch to the first target and mark it as a sink. 1749 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1750 // FIXME: add checker visit. 1751 // UndefBranches.insert(N); 1752 return; 1753 } 1754 1755 // This is really a catch-all. We don't support symbolics yet. 1756 // FIXME: Implement dispatch for symbolic pointers. 
1757 1758 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1759 builder.generateNode(I, state); 1760 } 1761 1762 #if 0 1763 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1764 const StackFrameContext* Frame = Pred.getStackFrame(); 1765 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1766 Pred.getState()->get<InitializedTemporariesSet>(); 1767 return std::find_if(Set.begin(), Set.end(), 1768 [&](const CXXBindTemporaryContext &Ctx) { 1769 if (Ctx.second == Frame) { 1770 Ctx.first->dump(); 1771 llvm::errs() << "\n"; 1772 } 1773 return Ctx.second == Frame; 1774 }) == Set.end(); 1775 } 1776 #endif 1777 1778 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1779 ExplodedNode *Pred, 1780 ExplodedNodeSet &Dst, 1781 const BlockEdge &L) { 1782 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1783 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1784 } 1785 1786 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1787 /// nodes when the control reaches the end of a function. 1788 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1789 ExplodedNode *Pred, 1790 const ReturnStmt *RS) { 1791 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1792 // We currently cannot enable this assert, as lifetime extended temporaries 1793 // are not modelled correctly. 1794 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1795 StateMgr.EndPath(Pred->getState()); 1796 1797 ExplodedNodeSet Dst; 1798 if (Pred->getLocationContext()->inTopFrame()) { 1799 // Remove dead symbols. 1800 ExplodedNodeSet AfterRemovedDead; 1801 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1802 1803 // Notify checkers. 1804 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1805 E = AfterRemovedDead.end(); I != E; ++I) { 1806 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1807 } 1808 } else { 1809 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1810 } 1811 1812 Engine.enqueueEndOfFunction(Dst, RS); 1813 } 1814 1815 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1816 /// nodes by processing the 'effects' of a switch statement. 1817 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1818 typedef SwitchNodeBuilder::iterator iterator; 1819 ProgramStateRef state = builder.getState(); 1820 const Expr *CondE = builder.getCondition(); 1821 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1822 1823 if (CondV_untested.isUndef()) { 1824 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1825 // FIXME: add checker 1826 //UndefBranches.insert(N); 1827 1828 return; 1829 } 1830 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1831 1832 ProgramStateRef DefaultSt = state; 1833 1834 iterator I = builder.begin(), EI = builder.end(); 1835 bool defaultIsFeasible = I == EI; 1836 1837 for ( ; I != EI; ++I) { 1838 // Successor may be pruned out during CFG construction. 1839 if (!I.getBlock()) 1840 continue; 1841 1842 const CaseStmt *Case = I.getCase(); 1843 1844 // Evaluate the LHS of the case value. 1845 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1846 assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType())); 1847 1848 // Get the RHS of the case, if it exists. 
1849 llvm::APSInt V2; 1850 if (const Expr *E = Case->getRHS()) 1851 V2 = E->EvaluateKnownConstInt(getContext()); 1852 else 1853 V2 = V1; 1854 1855 ProgramStateRef StateCase; 1856 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1857 std::tie(StateCase, DefaultSt) = 1858 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1859 else // UnknownVal 1860 StateCase = DefaultSt; 1861 1862 if (StateCase) 1863 builder.generateCaseStmtNode(I, StateCase); 1864 1865 // Now "assume" that the case doesn't match. Add this state 1866 // to the default state (if it is feasible). 1867 if (DefaultSt) 1868 defaultIsFeasible = true; 1869 else { 1870 defaultIsFeasible = false; 1871 break; 1872 } 1873 } 1874 1875 if (!defaultIsFeasible) 1876 return; 1877 1878 // If we have switch(enum value), the default branch is not 1879 // feasible if all of the enum constants not covered by 'case:' statements 1880 // are not feasible values for the switch condition. 1881 // 1882 // Note that this isn't as accurate as it could be. Even if there isn't 1883 // a case for a particular enum value as long as that enum value isn't 1884 // feasible then it shouldn't be considered for making 'default:' reachable. 1885 const SwitchStmt *SS = builder.getSwitch(); 1886 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1887 if (CondExpr->getType()->getAs<EnumType>()) { 1888 if (SS->isAllEnumCasesCovered()) 1889 return; 1890 } 1891 1892 builder.generateDefaultCaseNode(DefaultSt); 1893 } 1894 1895 //===----------------------------------------------------------------------===// 1896 // Transfer functions: Loads and stores. 1897 //===----------------------------------------------------------------------===// 1898 1899 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1900 ExplodedNode *Pred, 1901 ExplodedNodeSet &Dst) { 1902 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1903 1904 ProgramStateRef state = Pred->getState(); 1905 const LocationContext *LCtx = Pred->getLocationContext(); 1906 1907 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1908 // C permits "extern void v", and if you cast the address to a valid type, 1909 // you can even do things with it. We simply pretend 1910 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1911 const LocationContext *LocCtxt = Pred->getLocationContext(); 1912 const Decl *D = LocCtxt->getDecl(); 1913 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1914 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1915 SVal V; 1916 bool IsReference; 1917 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1918 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1919 MD->getParent()->isLambda()) { 1920 // Lookup the field of the lambda. 1921 const CXXRecordDecl *CXXRec = MD->getParent(); 1922 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1923 FieldDecl *LambdaThisCaptureField; 1924 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1925 const FieldDecl *FD = LambdaCaptureFields[VD]; 1926 if (!FD) { 1927 // When a constant is captured, sometimes no corresponding field is 1928 // created in the lambda object. 
1929 assert(VD->getType().isConstQualified()); 1930 V = state->getLValue(VD, LocCtxt); 1931 IsReference = false; 1932 } else { 1933 Loc CXXThis = 1934 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1935 SVal CXXThisVal = state->getSVal(CXXThis); 1936 V = state->getLValue(FD, CXXThisVal); 1937 IsReference = FD->getType()->isReferenceType(); 1938 } 1939 } else { 1940 V = state->getLValue(VD, LocCtxt); 1941 IsReference = VD->getType()->isReferenceType(); 1942 } 1943 1944 // For references, the 'lvalue' is the pointer address stored in the 1945 // reference region. 1946 if (IsReference) { 1947 if (const MemRegion *R = V.getAsRegion()) 1948 V = state->getSVal(R); 1949 else 1950 V = UnknownVal(); 1951 } 1952 1953 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1954 ProgramPoint::PostLValueKind); 1955 return; 1956 } 1957 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 1958 assert(!Ex->isGLValue()); 1959 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 1960 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 1961 return; 1962 } 1963 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 1964 SVal V = svalBuilder.getFunctionPointer(FD); 1965 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1966 ProgramPoint::PostLValueKind); 1967 return; 1968 } 1969 if (isa<FieldDecl>(D)) { 1970 // FIXME: Compute lvalue of field pointers-to-member. 1971 // Right now we just use a non-null void pointer, so that it gives proper 1972 // results in boolean contexts. 1973 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 1974 currBldrCtx->blockCount()); 1975 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 1976 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1977 ProgramPoint::PostLValueKind); 1978 return; 1979 } 1980 1981 llvm_unreachable("Support for this Decl not implemented."); 1982 } 1983 1984 /// VisitArraySubscriptExpr - Transfer function for array accesses 1985 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 1986 ExplodedNode *Pred, 1987 ExplodedNodeSet &Dst){ 1988 1989 const Expr *Base = A->getBase()->IgnoreParens(); 1990 const Expr *Idx = A->getIdx()->IgnoreParens(); 1991 1992 ExplodedNodeSet CheckerPreStmt; 1993 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 1994 1995 ExplodedNodeSet EvalSet; 1996 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 1997 assert(A->isGLValue() || 1998 (!AMgr.getLangOpts().CPlusPlus && 1999 A->getType().isCForbiddenLValueType())); 2000 2001 for (auto *Node : CheckerPreStmt) { 2002 const LocationContext *LCtx = Node->getLocationContext(); 2003 ProgramStateRef state = Node->getState(); 2004 SVal V = state->getLValue(A->getType(), 2005 state->getSVal(Idx, LCtx), 2006 state->getSVal(Base, LCtx)); 2007 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 2008 ProgramPoint::PostLValueKind); 2009 } 2010 2011 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 2012 } 2013 2014 /// VisitMemberExpr - Transfer function for member expressions. 2015 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2016 ExplodedNodeSet &Dst) { 2017 2018 // FIXME: Prechecks eventually go in ::Visit(). 
2019 ExplodedNodeSet CheckedSet; 2020 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this); 2021 2022 ExplodedNodeSet EvalSet; 2023 ValueDecl *Member = M->getMemberDecl(); 2024 2025 // Handle static member variables and enum constants accessed via 2026 // member syntax. 2027 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) { 2028 ExplodedNodeSet Dst; 2029 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2030 I != E; ++I) { 2031 VisitCommonDeclRefExpr(M, Member, Pred, EvalSet); 2032 } 2033 } else { 2034 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); 2035 ExplodedNodeSet Tmp; 2036 2037 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2038 I != E; ++I) { 2039 ProgramStateRef state = (*I)->getState(); 2040 const LocationContext *LCtx = (*I)->getLocationContext(); 2041 Expr *BaseExpr = M->getBase(); 2042 2043 // Handle C++ method calls. 2044 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) { 2045 if (MD->isInstance()) 2046 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2047 2048 SVal MDVal = svalBuilder.getFunctionPointer(MD); 2049 state = state->BindExpr(M, LCtx, MDVal); 2050 2051 Bldr.generateNode(M, *I, state); 2052 continue; 2053 } 2054 2055 // Handle regular struct fields / member variables. 2056 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2057 SVal baseExprVal = state->getSVal(BaseExpr, LCtx); 2058 2059 FieldDecl *field = cast<FieldDecl>(Member); 2060 SVal L = state->getLValue(field, baseExprVal); 2061 2062 if (M->isGLValue() || M->getType()->isArrayType()) { 2063 // We special-case rvalues of array type because the analyzer cannot 2064 // reason about them, since we expect all regions to be wrapped in Locs. 2065 // We instead treat these as lvalues and assume that they will decay to 2066 // pointers as soon as they are used. 2067 if (!M->isGLValue()) { 2068 assert(M->getType()->isArrayType()); 2069 const ImplicitCastExpr *PE = 2070 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M)); 2071 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) { 2072 llvm_unreachable("should always be wrapped in ArrayToPointerDecay"); 2073 } 2074 } 2075 2076 if (field->getType()->isReferenceType()) { 2077 if (const MemRegion *R = L.getAsRegion()) 2078 L = state->getSVal(R); 2079 else 2080 L = UnknownVal(); 2081 } 2082 2083 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr, 2084 ProgramPoint::PostLValueKind); 2085 } else { 2086 Bldr.takeNodes(*I); 2087 evalLoad(Tmp, M, M, *I, state, L); 2088 Bldr.addNodes(Tmp); 2089 } 2090 } 2091 } 2092 2093 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this); 2094 } 2095 2096 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred, 2097 ExplodedNodeSet &Dst) { 2098 ExplodedNodeSet AfterPreSet; 2099 getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this); 2100 2101 // For now, treat all the arguments to C11 atomics as escaping. 2102 // FIXME: Ideally we should model the behavior of the atomics precisely here. 
2103 
2104   ExplodedNodeSet AfterInvalidateSet;
2105   StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2106 
2107   for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2108        I != E; ++I) {
2109     ProgramStateRef State = (*I)->getState();
2110     const LocationContext *LCtx = (*I)->getLocationContext();
2111 
2112     SmallVector<SVal, 8> ValuesToInvalidate;
2113     for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2114       const Expr *SubExpr = AE->getSubExprs()[SI];
2115       SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2116       ValuesToInvalidate.push_back(SubExprVal);
2117     }
2118 
2119     State = State->invalidateRegions(ValuesToInvalidate, AE,
2120                                      currBldrCtx->blockCount(),
2121                                      LCtx,
2122                                      /*CausedByPointerEscape*/true,
2123                                      /*Symbols=*/nullptr);
2124 
2125     SVal ResultVal = UnknownVal();
2126     State = State->BindExpr(AE, LCtx, ResultVal);
2127     Bldr.generateNode(AE, *I, State, nullptr,
2128                       ProgramPoint::PostStmtKind);
2129   }
2130 
2131   getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2132 }
2133 
2134 namespace {
2135 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2136   InvalidatedSymbols Symbols;
2137 
2138 public:
2139   CollectReachableSymbolsCallback(ProgramStateRef State) {}
2140   const InvalidatedSymbols &getSymbols() const { return Symbols; }
2141 
2142   bool VisitSymbol(SymbolRef Sym) override {
2143     Symbols.insert(Sym);
2144     return true;
2145   }
2146 };
2147 } // end anonymous namespace
2148 
2149 // A value escapes in three possible cases:
2150 // (1) We are binding to something that is not a memory region.
2151 // (2) We are binding to a MemRegion that does not have stack storage.
2152 // (3) We are binding to a MemRegion with stack storage that the store
2153 //     does not understand.
2154 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2155                                                         SVal Loc, SVal Val) {
2156   // Are we storing to something that causes the value to "escape"?
2157   bool escapes = true;
2158 
2159   // TODO: Move to StoreManager.
2160   if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
2161     escapes = !regionLoc->getRegion()->hasStackStorage();
2162 
2163     if (!escapes) {
2164       // To test (3), generate a new state with the binding added. If it is
2165       // the same state, then it escapes (since the store cannot represent
2166       // the binding).
2167       // Do this only if we know that the store is not supposed to generate the
2168       // same state.
2169       SVal StoredVal = State->getSVal(regionLoc->getRegion());
2170       if (StoredVal != Val)
2171         escapes = (State == (State->bindLoc(*regionLoc, Val)));
2172     }
2173   }
2174 
2175   // If the store can represent the binding and the target region has stack
2176   // storage, the value does not escape: just return and let the simulation
2177   // state continue as is.
2178   if (!escapes)
2179     return State;
2180 
2181   // Otherwise, find all symbols referenced by 'Val' that we are tracking
2182   // and stop tracking them.
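  // scanReachableSymbols visits every symbol reachable from 'Val' through the
  // Store.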
2183 CollectReachableSymbolsCallback Scanner = 2184 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2185 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2186 State = getCheckerManager().runCheckersForPointerEscape(State, 2187 EscapedSymbols, 2188 /*CallEvent*/ nullptr, 2189 PSK_EscapeOnBind, 2190 nullptr); 2191 2192 return State; 2193 } 2194 2195 ProgramStateRef 2196 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2197 const InvalidatedSymbols *Invalidated, 2198 ArrayRef<const MemRegion *> ExplicitRegions, 2199 ArrayRef<const MemRegion *> Regions, 2200 const CallEvent *Call, 2201 RegionAndSymbolInvalidationTraits &ITraits) { 2202 2203 if (!Invalidated || Invalidated->empty()) 2204 return State; 2205 2206 if (!Call) 2207 return getCheckerManager().runCheckersForPointerEscape(State, 2208 *Invalidated, 2209 nullptr, 2210 PSK_EscapeOther, 2211 &ITraits); 2212 2213 // If the symbols were invalidated by a call, we want to find out which ones 2214 // were invalidated directly due to being arguments to the call. 2215 InvalidatedSymbols SymbolsDirectlyInvalidated; 2216 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2217 E = ExplicitRegions.end(); I != E; ++I) { 2218 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2219 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2220 } 2221 2222 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2223 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2224 E = Invalidated->end(); I!=E; ++I) { 2225 SymbolRef sym = *I; 2226 if (SymbolsDirectlyInvalidated.count(sym)) 2227 continue; 2228 SymbolsIndirectlyInvalidated.insert(sym); 2229 } 2230 2231 if (!SymbolsDirectlyInvalidated.empty()) 2232 State = getCheckerManager().runCheckersForPointerEscape(State, 2233 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2234 2235 // Notify about the symbols that get indirectly invalidated by the call. 2236 if (!SymbolsIndirectlyInvalidated.empty()) 2237 State = getCheckerManager().runCheckersForPointerEscape(State, 2238 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2239 2240 return State; 2241 } 2242 2243 /// evalBind - Handle the semantics of binding a value to a specific location. 2244 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2245 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2246 ExplodedNode *Pred, 2247 SVal location, SVal Val, 2248 bool atDeclInit, const ProgramPoint *PP) { 2249 2250 const LocationContext *LC = Pred->getLocationContext(); 2251 PostStmt PS(StoreE, LC); 2252 if (!PP) 2253 PP = &PS; 2254 2255 // Do a previsit of the bind. 2256 ExplodedNodeSet CheckedSet; 2257 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2258 StoreE, *this, *PP); 2259 2260 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2261 2262 // If the location is not a 'Loc', it will already be handled by 2263 // the checkers. There is nothing left to do. 
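  // Still notify checkers that the bound value may have escaped.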
2264 if (!location.getAs<Loc>()) { 2265 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2266 /*tag*/nullptr); 2267 ProgramStateRef state = Pred->getState(); 2268 state = processPointerEscapedOnBind(state, location, Val); 2269 Bldr.generateNode(L, state, Pred); 2270 return; 2271 } 2272 2273 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2274 I!=E; ++I) { 2275 ExplodedNode *PredI = *I; 2276 ProgramStateRef state = PredI->getState(); 2277 2278 state = processPointerEscapedOnBind(state, location, Val); 2279 2280 // When binding the value, pass on the hint that this is a initialization. 2281 // For initializations, we do not need to inform clients of region 2282 // changes. 2283 state = state->bindLoc(location.castAs<Loc>(), 2284 Val, /* notifyChanges = */ !atDeclInit); 2285 2286 const MemRegion *LocReg = nullptr; 2287 if (Optional<loc::MemRegionVal> LocRegVal = 2288 location.getAs<loc::MemRegionVal>()) { 2289 LocReg = LocRegVal->getRegion(); 2290 } 2291 2292 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2293 Bldr.generateNode(L, state, PredI); 2294 } 2295 } 2296 2297 /// evalStore - Handle the semantics of a store via an assignment. 2298 /// @param Dst The node set to store generated state nodes 2299 /// @param AssignE The assignment expression if the store happens in an 2300 /// assignment. 2301 /// @param LocationE The location expression that is stored to. 2302 /// @param state The current simulation state 2303 /// @param location The location to store the value 2304 /// @param Val The value to be stored 2305 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2306 const Expr *LocationE, 2307 ExplodedNode *Pred, 2308 ProgramStateRef state, SVal location, SVal Val, 2309 const ProgramPointTag *tag) { 2310 // Proceed with the store. We use AssignE as the anchor for the PostStore 2311 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2312 const Expr *StoreE = AssignE ? AssignE : LocationE; 2313 2314 // Evaluate the location (checks for bad dereferences). 2315 ExplodedNodeSet Tmp; 2316 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2317 2318 if (Tmp.empty()) 2319 return; 2320 2321 if (location.isUndef()) 2322 return; 2323 2324 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2325 evalBind(Dst, StoreE, *NI, location, Val, false); 2326 } 2327 2328 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2329 const Expr *NodeEx, 2330 const Expr *BoundEx, 2331 ExplodedNode *Pred, 2332 ProgramStateRef state, 2333 SVal location, 2334 const ProgramPointTag *tag, 2335 QualType LoadTy) 2336 { 2337 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2338 2339 // Are we loading from a region? This actually results in two loads; one 2340 // to fetch the address of the referenced value and one to fetch the 2341 // referenced value. 2342 if (const TypedValueRegion *TR = 2343 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2344 2345 QualType ValTy = TR->getValueType(); 2346 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2347 static SimpleProgramPointTag 2348 loadReferenceTag(TagProviderName, "Load Reference"); 2349 ExplodedNodeSet Tmp; 2350 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2351 location, &loadReferenceTag, 2352 getContext().getPointerType(RT->getPointeeType())); 2353 2354 // Perform the load from the referenced value. 
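      // The first load produced the address of the referenced object; now load
      // through it to get the bound value.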
2355 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2356 state = (*I)->getState(); 2357 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2358 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2359 } 2360 return; 2361 } 2362 } 2363 2364 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2365 } 2366 2367 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2368 const Expr *NodeEx, 2369 const Expr *BoundEx, 2370 ExplodedNode *Pred, 2371 ProgramStateRef state, 2372 SVal location, 2373 const ProgramPointTag *tag, 2374 QualType LoadTy) { 2375 assert(NodeEx); 2376 assert(BoundEx); 2377 // Evaluate the location (checks for bad dereferences). 2378 ExplodedNodeSet Tmp; 2379 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2380 if (Tmp.empty()) 2381 return; 2382 2383 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2384 if (location.isUndef()) 2385 return; 2386 2387 // Proceed with the load. 2388 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2389 state = (*NI)->getState(); 2390 const LocationContext *LCtx = (*NI)->getLocationContext(); 2391 2392 SVal V = UnknownVal(); 2393 if (location.isValid()) { 2394 if (LoadTy.isNull()) 2395 LoadTy = BoundEx->getType(); 2396 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2397 } 2398 2399 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2400 ProgramPoint::PostLoadKind); 2401 } 2402 } 2403 2404 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2405 const Stmt *NodeEx, 2406 const Stmt *BoundEx, 2407 ExplodedNode *Pred, 2408 ProgramStateRef state, 2409 SVal location, 2410 const ProgramPointTag *tag, 2411 bool isLoad) { 2412 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2413 // Early checks for performance reason. 2414 if (location.isUnknown()) { 2415 return; 2416 } 2417 2418 ExplodedNodeSet Src; 2419 BldrTop.takeNodes(Pred); 2420 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2421 if (Pred->getState() != state) { 2422 // Associate this new state with an ExplodedNode. 2423 // FIXME: If I pass null tag, the graph is incorrect, e.g for 2424 // int *p; 2425 // p = 0; 2426 // *p = 0xDEADBEEF; 2427 // "p = 0" is not noted as "Null pointer value stored to 'p'" but 2428 // instead "int *p" is noted as 2429 // "Variable 'p' initialized to a null pointer value" 2430 2431 static SimpleProgramPointTag tag(TagProviderName, "Location"); 2432 Bldr.generateNode(NodeEx, Pred, state, &tag); 2433 } 2434 ExplodedNodeSet Tmp; 2435 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad, 2436 NodeEx, BoundEx, *this); 2437 BldrTop.addNodes(Tmp); 2438 } 2439 2440 std::pair<const ProgramPointTag *, const ProgramPointTag*> 2441 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() { 2442 static SimpleProgramPointTag 2443 eagerlyAssumeBinOpBifurcationTrue(TagProviderName, 2444 "Eagerly Assume True"), 2445 eagerlyAssumeBinOpBifurcationFalse(TagProviderName, 2446 "Eagerly Assume False"); 2447 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue, 2448 &eagerlyAssumeBinOpBifurcationFalse); 2449 } 2450 2451 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst, 2452 ExplodedNodeSet &Src, 2453 const Expr *Ex) { 2454 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx); 2455 2456 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) { 2457 ExplodedNode *Pred = *I; 2458 // Test if the previous node was as the same expression. 
This can happen 2459 // when the expression fails to evaluate to anything meaningful and 2460 // (as an optimization) we don't generate a node. 2461 ProgramPoint P = Pred->getLocation(); 2462 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2463 continue; 2464 } 2465 2466 ProgramStateRef state = Pred->getState(); 2467 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2468 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2469 if (SEV && SEV->isExpression()) { 2470 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2471 geteagerlyAssumeBinOpBifurcationTags(); 2472 2473 ProgramStateRef StateTrue, StateFalse; 2474 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2475 2476 // First assume that the condition is true. 2477 if (StateTrue) { 2478 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2479 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2480 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2481 } 2482 2483 // Next, assume that the condition is false. 2484 if (StateFalse) { 2485 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2486 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2487 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2488 } 2489 } 2490 } 2491 } 2492 2493 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2494 ExplodedNodeSet &Dst) { 2495 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2496 // We have processed both the inputs and the outputs. All of the outputs 2497 // should evaluate to Locs. Nuke all of their values. 2498 2499 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2500 // which interprets the inline asm and stores proper results in the 2501 // outputs. 2502 2503 ProgramStateRef state = Pred->getState(); 2504 2505 for (const Expr *O : A->outputs()) { 2506 SVal X = state->getSVal(O, Pred->getLocationContext()); 2507 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2508 2509 if (Optional<Loc> LV = X.getAs<Loc>()) 2510 state = state->bindLoc(*LV, UnknownVal()); 2511 } 2512 2513 Bldr.generateNode(A, Pred, state); 2514 } 2515 2516 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2517 ExplodedNodeSet &Dst) { 2518 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2519 Bldr.generateNode(A, Pred, Pred->getState()); 2520 } 2521 2522 //===----------------------------------------------------------------------===// 2523 // Visualization. 2524 //===----------------------------------------------------------------------===// 2525 2526 #ifndef NDEBUG 2527 static ExprEngine* GraphPrintCheckerState; 2528 static SourceManager* GraphPrintSourceManager; 2529 2530 namespace llvm { 2531 template<> 2532 struct DOTGraphTraits<ExplodedNode*> : 2533 public DefaultDOTGraphTraits { 2534 2535 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2536 2537 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2538 // work. 2539 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2540 return ""; 2541 } 2542 2543 // De-duplicate some source location pretty-printing. 
2544 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2545 if (SLoc.isFileID()) { 2546 Out << "\\lline=" 2547 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2548 << " col=" 2549 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2550 << "\\l"; 2551 } 2552 } 2553 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2554 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2555 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2556 else 2557 SLoc.print(Out, *GraphPrintSourceManager); 2558 } 2559 2560 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2561 2562 std::string sbuf; 2563 llvm::raw_string_ostream Out(sbuf); 2564 2565 // Program Location. 2566 ProgramPoint Loc = N->getLocation(); 2567 2568 switch (Loc.getKind()) { 2569 case ProgramPoint::BlockEntranceKind: { 2570 Out << "Block Entrance: B" 2571 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2572 break; 2573 } 2574 2575 case ProgramPoint::BlockExitKind: 2576 assert (false); 2577 break; 2578 2579 case ProgramPoint::CallEnterKind: 2580 Out << "CallEnter"; 2581 break; 2582 2583 case ProgramPoint::CallExitBeginKind: 2584 Out << "CallExitBegin"; 2585 break; 2586 2587 case ProgramPoint::CallExitEndKind: 2588 Out << "CallExitEnd"; 2589 break; 2590 2591 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2592 Out << "PostStmtPurgeDeadSymbols"; 2593 break; 2594 2595 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2596 Out << "PreStmtPurgeDeadSymbols"; 2597 break; 2598 2599 case ProgramPoint::EpsilonKind: 2600 Out << "Epsilon Point"; 2601 break; 2602 2603 case ProgramPoint::PreImplicitCallKind: { 2604 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2605 Out << "PreCall: "; 2606 2607 // FIXME: Get proper printing options. 2608 PC.getDecl()->print(Out, LangOptions()); 2609 printLocation(Out, PC.getLocation()); 2610 break; 2611 } 2612 2613 case ProgramPoint::PostImplicitCallKind: { 2614 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2615 Out << "PostCall: "; 2616 2617 // FIXME: Get proper printing options. 2618 PC.getDecl()->print(Out, LangOptions()); 2619 printLocation(Out, PC.getLocation()); 2620 break; 2621 } 2622 2623 case ProgramPoint::PostInitializerKind: { 2624 Out << "PostInitializer: "; 2625 const CXXCtorInitializer *Init = 2626 Loc.castAs<PostInitializer>().getInitializer(); 2627 if (const FieldDecl *FD = Init->getAnyMember()) 2628 Out << *FD; 2629 else { 2630 QualType Ty = Init->getTypeSourceInfo()->getType(); 2631 Ty = Ty.getLocalUnqualifiedType(); 2632 LangOptions LO; // FIXME. 2633 Ty.print(Out, LO); 2634 } 2635 break; 2636 } 2637 2638 case ProgramPoint::BlockEdgeKind: { 2639 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2640 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2641 << E.getDst()->getBlockID() << ')'; 2642 2643 if (const Stmt *T = E.getSrc()->getTerminator()) { 2644 SourceLocation SLoc = T->getLocStart(); 2645 2646 Out << "\\|Terminator: "; 2647 LangOptions LO; // FIXME. 2648 E.getSrc()->printTerminator(Out, LO); 2649 2650 if (SLoc.isFileID()) { 2651 Out << "\\lline=" 2652 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2653 << " col=" 2654 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2655 } 2656 2657 if (isa<SwitchStmt>(T)) { 2658 const Stmt *Label = E.getDst()->getLabel(); 2659 2660 if (Label) { 2661 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2662 Out << "\\lcase "; 2663 LangOptions LO; // FIXME. 
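              // Print the case value, and the upper bound if this is a GNU
              // case range.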
2664 if (C->getLHS()) 2665 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2666 2667 if (const Stmt *RHS = C->getRHS()) { 2668 Out << " .. "; 2669 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2670 } 2671 2672 Out << ":"; 2673 } 2674 else { 2675 assert (isa<DefaultStmt>(Label)); 2676 Out << "\\ldefault:"; 2677 } 2678 } 2679 else 2680 Out << "\\l(implicit) default:"; 2681 } 2682 else if (isa<IndirectGotoStmt>(T)) { 2683 // FIXME 2684 } 2685 else { 2686 Out << "\\lCondition: "; 2687 if (*E.getSrc()->succ_begin() == E.getDst()) 2688 Out << "true"; 2689 else 2690 Out << "false"; 2691 } 2692 2693 Out << "\\l"; 2694 } 2695 2696 break; 2697 } 2698 2699 default: { 2700 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2701 assert(S != nullptr && "Expecting non-null Stmt"); 2702 2703 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2704 LangOptions LO; // FIXME. 2705 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2706 printLocation(Out, S->getLocStart()); 2707 2708 if (Loc.getAs<PreStmt>()) 2709 Out << "\\lPreStmt\\l;"; 2710 else if (Loc.getAs<PostLoad>()) 2711 Out << "\\lPostLoad\\l;"; 2712 else if (Loc.getAs<PostStore>()) 2713 Out << "\\lPostStore\\l"; 2714 else if (Loc.getAs<PostLValue>()) 2715 Out << "\\lPostLValue\\l"; 2716 2717 break; 2718 } 2719 } 2720 2721 ProgramStateRef state = N->getState(); 2722 Out << "\\|StateID: " << (const void*) state.get() 2723 << " NodeID: " << (const void*) N << "\\|"; 2724 2725 // Analysis stack backtrace. 2726 Out << "Location context stack (from current to outer):\\l"; 2727 const LocationContext *LC = Loc.getLocationContext(); 2728 unsigned Idx = 0; 2729 for (; LC; LC = LC->getParent(), ++Idx) { 2730 Out << Idx << ". (" << (const void *)LC << ") "; 2731 switch (LC->getKind()) { 2732 case LocationContext::StackFrame: 2733 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2734 Out << "Calling " << D->getQualifiedNameAsString(); 2735 else 2736 Out << "Calling anonymous code"; 2737 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2738 Out << " at "; 2739 printLocation2(Out, S->getLocStart()); 2740 } 2741 break; 2742 case LocationContext::Block: 2743 Out << "Invoking block"; 2744 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2745 Out << " defined at "; 2746 printLocation2(Out, D->getLocStart()); 2747 } 2748 break; 2749 case LocationContext::Scope: 2750 Out << "Entering scope"; 2751 // FIXME: Add more info once ScopeContext is activated. 2752 break; 2753 } 2754 Out << "\\l"; 2755 } 2756 Out << "\\l"; 2757 2758 state->printDOT(Out); 2759 2760 Out << "\\l"; 2761 2762 if (const ProgramPointTag *tag = Loc.getTag()) { 2763 Out << "\\|Tag: " << tag->getTagDescription(); 2764 Out << "\\l"; 2765 } 2766 return Out.str(); 2767 } 2768 }; 2769 } // end llvm namespace 2770 #endif 2771 2772 void ExprEngine::ViewGraph(bool trim) { 2773 #ifndef NDEBUG 2774 if (trim) { 2775 std::vector<const ExplodedNode*> Src; 2776 2777 // Flush any outstanding reports to make sure we cover all the nodes. 2778 // This does not cause them to get displayed. 2779 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2780 const_cast<BugType*>(*I)->FlushReports(BR); 2781 2782 // Iterate through the reports and get their nodes. 
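    // Each report equivalence class contributes the error node of its first
    // report.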
2783 for (BugReporter::EQClasses_iterator 2784 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2785 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2786 if (N) Src.push_back(N); 2787 } 2788 2789 ViewGraph(Src); 2790 } 2791 else { 2792 GraphPrintCheckerState = this; 2793 GraphPrintSourceManager = &getContext().getSourceManager(); 2794 2795 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2796 2797 GraphPrintCheckerState = nullptr; 2798 GraphPrintSourceManager = nullptr; 2799 } 2800 #endif 2801 } 2802 2803 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2804 #ifndef NDEBUG 2805 GraphPrintCheckerState = this; 2806 GraphPrintSourceManager = &getContext().getSourceManager(); 2807 2808 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2809 2810 if (!TrimmedG.get()) 2811 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2812 else 2813 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2814 2815 GraphPrintCheckerState = nullptr; 2816 GraphPrintSourceManager = nullptr; 2817 #endif 2818 } 2819