//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
            "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
            "The # of aborted paths due to reaching the maximum block count in "
            "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
            "The # of aborted paths due to reaching the maximum block count in "
            "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
            "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // MaterializeTemporaryExpr may appear out of place, after a few field and
  // base-class accesses have been made to the object, even though semantically
  // it is the whole object that gets materialized and lifetime-extended.
  //
  // For example:
  //
  //   `-MaterializeTemporaryExpr
  //     `-MemberExpr
  //       `-CXXTemporaryObjectExpr
  //
  // instead of the more natural
  //
  //   `-MemberExpr
  //     `-MaterializeTemporaryExpr
  //       `-CXXTemporaryObjectExpr
  //
  // Use the usual methods for obtaining the expression of the base object,
  // and record the adjustments that we need to make to obtain the sub-object
  // that the whole expression 'Ex' refers to. This is the usual trick;
  // CodeGen takes a similar route.

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;

  const Expr *Init = Ex->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);

  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Init, LC);

  SVal Reg = loc::MemRegionVal(TR);

  // Make the necessary adjustments to obtain the sub-object.
  for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
    const SubobjectAdjustment &Adj = *I;
    switch (Adj.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
      break;
    case SubobjectAdjustment::FieldAdjustment:
      Reg = StoreMgr.getLValueField(Adj.Field, Reg);
      break;
    case SubobjectAdjustment::MemberPointerAdjustment:
      // FIXME: Unimplemented.
      State->bindDefault(Reg, UnknownVal(), LC);
      return State;
    }
  }

  // Try to recover some path sensitivity in case we couldn't compute the value.
  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  // Bind the value of the expression to the sub-object region, and then bind
  // the sub-object region to our expression.
  State = State->bindLoc(Reg, V, LC);
  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// processAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const LocationContext *LCtx,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         LCtx, Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
          I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
        "Checkers are not allowed to modify the Environment as a part of "
        "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
        "Checkers are not allowed to modify the Store as a part of "
        "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
        StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())){
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
                           cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
                           cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (Init->getType()->isArrayType()) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
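        // The initializer may be wrapped in one or more array-subscript
        // expressions; walk up to the base array lvalue before loading.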
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (!Field->getType()->isReferenceType())
          if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
            InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
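  // When the allocator call is not modeled, we still emit a PostImplicitCall
  // program point for operator new below, so the path reflects that the
  // allocator ran.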
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (varType->isReferenceType()) {
    const MemRegion *ValueRegion = state->getSVal(Region).getAsRegion();
    if (!ValueRegion) {
      // FIXME: This should not happen. The language guarantees the presence
      // of a valid initializer here, so the reference shall not be undefined.
      // It seems that we're calling destructors over variables that
      // were not initialized yet.
      return;
    }
    Region = ValueRegion->getBaseRegion();
    varType = cast<TypedValueRegion>(Region)->getValueType();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
      std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
        std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++, OpenMP and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::DependentCoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeSimdDirectiveClass:
    case Stmt::CapturedStmtClass:
    {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ArrayInitLoopExprClass:
    case Stmt::ArrayInitIndexExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
            dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
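      // (createTemporaryRegionIfNeeded only materializes a region when the
      // argument is currently a NonLoc, i.e. not already backed by a region.)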
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExpr as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, MTE, *this);
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        CreateCXXTemporaryObject(MTE, *i, dstExpr);
      }
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, MTE, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
    NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.
1600 1601 // The invariants are still shifting, but it is possible that the 1602 // last element in a CFGBlock is not a CFGStmt. Look for the last 1603 // CFGStmt as the value of the condition. 1604 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1605 for (; I != E; ++I) { 1606 CFGElement Elem = *I; 1607 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1608 if (!CS) 1609 continue; 1610 const Stmt *LastStmt = CS->getStmt(); 1611 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1612 return LastStmt; 1613 } 1614 llvm_unreachable("could not resolve condition"); 1615 } 1616 1617 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1618 NodeBuilderContext& BldCtx, 1619 ExplodedNode *Pred, 1620 ExplodedNodeSet &Dst, 1621 const CFGBlock *DstT, 1622 const CFGBlock *DstF) { 1623 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1624 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1625 const LocationContext *LCtx = Pred->getLocationContext(); 1626 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1627 currBldrCtx = &BldCtx; 1628 1629 // Check for NULL conditions; e.g. "for(;;)" 1630 if (!Condition) { 1631 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1632 NullCondBldr.markInfeasible(false); 1633 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1634 return; 1635 } 1636 1637 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1638 Condition = Ex->IgnoreParens(); 1639 1640 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1641 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1642 Condition->getLocStart(), 1643 "Error evaluating branch"); 1644 1645 ExplodedNodeSet CheckersOutSet; 1646 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1647 Pred, *this); 1648 // We generated only sinks. 1649 if (CheckersOutSet.empty()) 1650 return; 1651 1652 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1653 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1654 E = CheckersOutSet.end(); E != I; ++I) { 1655 ExplodedNode *PredI = *I; 1656 1657 if (PredI->isSink()) 1658 continue; 1659 1660 ProgramStateRef PrevState = PredI->getState(); 1661 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1662 1663 if (X.isUnknownOrUndef()) { 1664 // Give it a chance to recover from unknown. 1665 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1666 if (Ex->getType()->isIntegralOrEnumerationType()) { 1667 // Try to recover some path-sensitivity. Right now casts of symbolic 1668 // integers that promote their values are currently not tracked well. 1669 // If 'Condition' is such an expression, try and recover the 1670 // underlying value and use that instead. 1671 SVal recovered = RecoverCastedSymbol(getStateManager(), 1672 PrevState, Condition, 1673 PredI->getLocationContext(), 1674 getContext()); 1675 1676 if (!recovered.isUnknown()) { 1677 X = recovered; 1678 } 1679 } 1680 } 1681 } 1682 1683 // If the condition is still unknown, give up. 1684 if (X.isUnknownOrUndef()) { 1685 builder.generateNode(PrevState, true, PredI); 1686 builder.generateNode(PrevState, false, PredI); 1687 continue; 1688 } 1689 1690 DefinedSVal V = X.castAs<DefinedSVal>(); 1691 1692 ProgramStateRef StTrue, StFalse; 1693 std::tie(StTrue, StFalse) = PrevState->assume(V); 1694 1695 // Process the true branch. 
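// (A true-branch successor is generated only if assuming the condition true
// yields a feasible state; otherwise that branch is marked infeasible. The
// false branch below is handled symmetrically.)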
1696 if (builder.isFeasible(true)) { 1697 if (StTrue) 1698 builder.generateNode(StTrue, true, PredI); 1699 else 1700 builder.markInfeasible(true); 1701 } 1702 1703 // Process the false branch. 1704 if (builder.isFeasible(false)) { 1705 if (StFalse) 1706 builder.generateNode(StFalse, false, PredI); 1707 else 1708 builder.markInfeasible(false); 1709 } 1710 } 1711 currBldrCtx = nullptr; 1712 } 1713 1714 /// The GDM component containing the set of global variables which have been 1715 /// previously initialized with explicit initializers. 1716 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1717 llvm::ImmutableSet<const VarDecl *>) 1718 1719 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1720 NodeBuilderContext &BuilderCtx, 1721 ExplodedNode *Pred, 1722 clang::ento::ExplodedNodeSet &Dst, 1723 const CFGBlock *DstT, 1724 const CFGBlock *DstF) { 1725 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1726 currBldrCtx = &BuilderCtx; 1727 1728 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1729 ProgramStateRef state = Pred->getState(); 1730 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1731 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1732 1733 if (!initHasRun) { 1734 state = state->add<InitializedGlobalsSet>(VD); 1735 } 1736 1737 builder.generateNode(state, initHasRun, Pred); 1738 builder.markInfeasible(!initHasRun); 1739 1740 currBldrCtx = nullptr; 1741 } 1742 1743 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1744 /// nodes by processing the 'effects' of a computed goto jump. 1745 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1746 1747 ProgramStateRef state = builder.getState(); 1748 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1749 1750 // Three possibilities: 1751 // 1752 // (1) We know the computed label. 1753 // (2) The label is NULL (or some other constant), or Undefined. 1754 // (3) We have no clue about the label. Dispatch to all targets. 1755 // 1756 1757 typedef IndirectGotoNodeBuilder::iterator iterator; 1758 1759 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1760 const LabelDecl *L = LV->getLabel(); 1761 1762 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1763 if (I.getLabel() == L) { 1764 builder.generateNode(I, state); 1765 return; 1766 } 1767 } 1768 1769 llvm_unreachable("No block with label."); 1770 } 1771 1772 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1773 // Dispatch to the first target and mark it as a sink. 1774 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1775 // FIXME: add checker visit. 1776 // UndefBranches.insert(N); 1777 return; 1778 } 1779 1780 // This is really a catch-all. We don't support symbolics yet. 1781 // FIXME: Implement dispatch for symbolic pointers. 
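// Illustrative example (hypothetical): for 'goto *labelTable[i];' with a
// symbolic index 'i', the target cannot be resolved, so the loop below
// conservatively generates a successor for every possible label target.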
1782 1783 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1784 builder.generateNode(I, state); 1785 } 1786 1787 #if 0 1788 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1789 const StackFrameContext* Frame = Pred.getStackFrame(); 1790 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1791 Pred.getState()->get<InitializedTemporariesSet>(); 1792 return std::find_if(Set.begin(), Set.end(), 1793 [&](const CXXBindTemporaryContext &Ctx) { 1794 if (Ctx.second == Frame) { 1795 Ctx.first->dump(); 1796 llvm::errs() << "\n"; 1797 } 1798 return Ctx.second == Frame; 1799 }) == Set.end(); 1800 } 1801 #endif 1802 1803 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1804 ExplodedNode *Pred, 1805 ExplodedNodeSet &Dst, 1806 const BlockEdge &L) { 1807 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1808 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1809 } 1810 1811 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1812 /// nodes when the control reaches the end of a function. 1813 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1814 ExplodedNode *Pred, 1815 const ReturnStmt *RS) { 1816 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1817 // We currently cannot enable this assert, as lifetime extended temporaries 1818 // are not modelled correctly. 1819 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1820 StateMgr.EndPath(Pred->getState()); 1821 1822 ExplodedNodeSet Dst; 1823 if (Pred->getLocationContext()->inTopFrame()) { 1824 // Remove dead symbols. 1825 ExplodedNodeSet AfterRemovedDead; 1826 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1827 1828 // Notify checkers. 1829 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1830 E = AfterRemovedDead.end(); I != E; ++I) { 1831 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1832 } 1833 } else { 1834 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1835 } 1836 1837 Engine.enqueueEndOfFunction(Dst, RS); 1838 } 1839 1840 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1841 /// nodes by processing the 'effects' of a switch statement. 1842 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1843 typedef SwitchNodeBuilder::iterator iterator; 1844 ProgramStateRef state = builder.getState(); 1845 const Expr *CondE = builder.getCondition(); 1846 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1847 1848 if (CondV_untested.isUndef()) { 1849 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1850 // FIXME: add checker 1851 //UndefBranches.insert(N); 1852 1853 return; 1854 } 1855 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1856 1857 ProgramStateRef DefaultSt = state; 1858 1859 iterator I = builder.begin(), EI = builder.end(); 1860 bool defaultIsFeasible = I == EI; 1861 1862 for ( ; I != EI; ++I) { 1863 // Successor may be pruned out during CFG construction. 1864 if (!I.getBlock()) 1865 continue; 1866 1867 const CaseStmt *Case = I.getCase(); 1868 1869 // Evaluate the LHS of the case value. 1870 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1871 assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType())); 1872 1873 // Get the RHS of the case, if it exists. 
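// (For a GNU case range such as 'case 1 ... 5:', V1 and V2 become the two
// endpoints; for an ordinary 'case 3:', the RHS is absent and V2 is set to
// V1 below.)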
1874 llvm::APSInt V2; 1875 if (const Expr *E = Case->getRHS()) 1876 V2 = E->EvaluateKnownConstInt(getContext()); 1877 else 1878 V2 = V1; 1879 1880 ProgramStateRef StateCase; 1881 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1882 std::tie(StateCase, DefaultSt) = 1883 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1884 else // UnknownVal 1885 StateCase = DefaultSt; 1886 1887 if (StateCase) 1888 builder.generateCaseStmtNode(I, StateCase); 1889 1890 // Now "assume" that the case doesn't match. Add this state 1891 // to the default state (if it is feasible). 1892 if (DefaultSt) 1893 defaultIsFeasible = true; 1894 else { 1895 defaultIsFeasible = false; 1896 break; 1897 } 1898 } 1899 1900 if (!defaultIsFeasible) 1901 return; 1902 1903 // If we have switch(enum value), the default branch is not 1904 // feasible if all of the enum constants not covered by 'case:' statements 1905 // are not feasible values for the switch condition. 1906 // 1907 // Note that this isn't as accurate as it could be. Even if there isn't 1908 // a case for a particular enum value as long as that enum value isn't 1909 // feasible then it shouldn't be considered for making 'default:' reachable. 1910 const SwitchStmt *SS = builder.getSwitch(); 1911 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1912 if (CondExpr->getType()->getAs<EnumType>()) { 1913 if (SS->isAllEnumCasesCovered()) 1914 return; 1915 } 1916 1917 builder.generateDefaultCaseNode(DefaultSt); 1918 } 1919 1920 //===----------------------------------------------------------------------===// 1921 // Transfer functions: Loads and stores. 1922 //===----------------------------------------------------------------------===// 1923 1924 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1925 ExplodedNode *Pred, 1926 ExplodedNodeSet &Dst) { 1927 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1928 1929 ProgramStateRef state = Pred->getState(); 1930 const LocationContext *LCtx = Pred->getLocationContext(); 1931 1932 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1933 // C permits "extern void v", and if you cast the address to a valid type, 1934 // you can even do things with it. We simply pretend 1935 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1936 const LocationContext *LocCtxt = Pred->getLocationContext(); 1937 const Decl *D = LocCtxt->getDecl(); 1938 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1939 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1940 SVal V; 1941 bool IsReference; 1942 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1943 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1944 MD->getParent()->isLambda()) { 1945 // Lookup the field of the lambda. 1946 const CXXRecordDecl *CXXRec = MD->getParent(); 1947 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1948 FieldDecl *LambdaThisCaptureField; 1949 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1950 const FieldDecl *FD = LambdaCaptureFields[VD]; 1951 if (!FD) { 1952 // When a constant is captured, sometimes no corresponding field is 1953 // created in the lambda object. 
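// (Illustrative example: a constant such as 'const int n = 4;' referenced
// inside the lambda body may be read directly from the enclosing scope
// rather than through a capture field.)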
1954 assert(VD->getType().isConstQualified()); 1955 V = state->getLValue(VD, LocCtxt); 1956 IsReference = false; 1957 } else { 1958 Loc CXXThis = 1959 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1960 SVal CXXThisVal = state->getSVal(CXXThis); 1961 V = state->getLValue(FD, CXXThisVal); 1962 IsReference = FD->getType()->isReferenceType(); 1963 } 1964 } else { 1965 V = state->getLValue(VD, LocCtxt); 1966 IsReference = VD->getType()->isReferenceType(); 1967 } 1968 1969 // For references, the 'lvalue' is the pointer address stored in the 1970 // reference region. 1971 if (IsReference) { 1972 if (const MemRegion *R = V.getAsRegion()) 1973 V = state->getSVal(R); 1974 else 1975 V = UnknownVal(); 1976 } 1977 1978 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1979 ProgramPoint::PostLValueKind); 1980 return; 1981 } 1982 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 1983 assert(!Ex->isGLValue()); 1984 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 1985 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 1986 return; 1987 } 1988 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 1989 SVal V = svalBuilder.getFunctionPointer(FD); 1990 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1991 ProgramPoint::PostLValueKind); 1992 return; 1993 } 1994 if (isa<FieldDecl>(D)) { 1995 // FIXME: Compute lvalue of field pointers-to-member. 1996 // Right now we just use a non-null void pointer, so that it gives proper 1997 // results in boolean contexts. 1998 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 1999 currBldrCtx->blockCount()); 2000 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 2001 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2002 ProgramPoint::PostLValueKind); 2003 return; 2004 } 2005 2006 llvm_unreachable("Support for this Decl not implemented."); 2007 } 2008 2009 /// VisitArraySubscriptExpr - Transfer function for array accesses 2010 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 2011 ExplodedNode *Pred, 2012 ExplodedNodeSet &Dst){ 2013 2014 const Expr *Base = A->getBase()->IgnoreParens(); 2015 const Expr *Idx = A->getIdx()->IgnoreParens(); 2016 2017 ExplodedNodeSet CheckerPreStmt; 2018 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 2019 2020 ExplodedNodeSet EvalSet; 2021 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 2022 assert(A->isGLValue() || 2023 (!AMgr.getLangOpts().CPlusPlus && 2024 A->getType().isCForbiddenLValueType())); 2025 2026 for (auto *Node : CheckerPreStmt) { 2027 const LocationContext *LCtx = Node->getLocationContext(); 2028 ProgramStateRef state = Node->getState(); 2029 SVal V = state->getLValue(A->getType(), 2030 state->getSVal(Idx, LCtx), 2031 state->getSVal(Base, LCtx)); 2032 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 2033 ProgramPoint::PostLValueKind); 2034 } 2035 2036 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 2037 } 2038 2039 /// VisitMemberExpr - Transfer function for member expressions. 2040 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2041 ExplodedNodeSet &Dst) { 2042 2043 // FIXME: Prechecks eventually go in ::Visit(). 
2044 ExplodedNodeSet CheckedSet;
2045 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
2046
2047 ExplodedNodeSet EvalSet;
2048 ValueDecl *Member = M->getMemberDecl();
2049
2050 // Handle static member variables and enum constants accessed via
2051 // member syntax.
2052 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
2054 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2055 I != E; ++I) {
2056 VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
2057 }
2058 } else {
2059 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
2060 ExplodedNodeSet Tmp;
2061
2062 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2063 I != E; ++I) {
2064 ProgramStateRef state = (*I)->getState();
2065 const LocationContext *LCtx = (*I)->getLocationContext();
2066 Expr *BaseExpr = M->getBase();
2067
2068 // Handle C++ method calls.
2069 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
2070 if (MD->isInstance())
2071 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2072
2073 SVal MDVal = svalBuilder.getFunctionPointer(MD);
2074 state = state->BindExpr(M, LCtx, MDVal);
2075
2076 Bldr.generateNode(M, *I, state);
2077 continue;
2078 }
2079
2080 // Handle regular struct fields / member variables.
2081 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2082 SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
2083
2084 FieldDecl *field = cast<FieldDecl>(Member);
2085 SVal L = state->getLValue(field, baseExprVal);
2086
2087 if (M->isGLValue() || M->getType()->isArrayType()) {
2088 // We special-case rvalues of array type because the analyzer cannot
2089 // reason about them, since we expect all regions to be wrapped in Locs.
2090 // We instead treat these as lvalues and assume that they will decay to
2091 // pointers as soon as they are used.
2092 if (!M->isGLValue()) {
2093 assert(M->getType()->isArrayType());
2094 const ImplicitCastExpr *PE =
2095 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M));
2096 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
2097 llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
2098 }
2099 }
2100
2101 if (field->getType()->isReferenceType()) {
2102 if (const MemRegion *R = L.getAsRegion())
2103 L = state->getSVal(R);
2104 else
2105 L = UnknownVal();
2106 }
2107
2108 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
2109 ProgramPoint::PostLValueKind);
2110 } else {
2111 Bldr.takeNodes(*I);
2112 evalLoad(Tmp, M, M, *I, state, L);
2113 Bldr.addNodes(Tmp);
2114 }
2115 }
2116 }
2117
2118 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
2119 }
2120
2121 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
2122 ExplodedNodeSet &Dst) {
2123 ExplodedNodeSet AfterPreSet;
2124 getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
2125
2126 // For now, treat all the arguments to C11 atomics as escaping.
2127 // FIXME: Ideally we should model the behavior of the atomics precisely here.
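// Illustrative example (hypothetical call): for
//   __c11_atomic_store(&obj, newVal, __ATOMIC_SEQ_CST);
// both '&obj' and 'newVal' are collected and invalidated below, as if they
// had been passed to an opaque function.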
2128
2129 ExplodedNodeSet AfterInvalidateSet;
2130 StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2131
2132 for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2133 I != E; ++I) {
2134 ProgramStateRef State = (*I)->getState();
2135 const LocationContext *LCtx = (*I)->getLocationContext();
2136
2137 SmallVector<SVal, 8> ValuesToInvalidate;
2138 for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2139 const Expr *SubExpr = AE->getSubExprs()[SI];
2140 SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2141 ValuesToInvalidate.push_back(SubExprVal);
2142 }
2143
2144 State = State->invalidateRegions(ValuesToInvalidate, AE,
2145 currBldrCtx->blockCount(),
2146 LCtx,
2147 /*CausedByPointerEscape*/true,
2148 /*Symbols=*/nullptr);
2149
2150 SVal ResultVal = UnknownVal();
2151 State = State->BindExpr(AE, LCtx, ResultVal);
2152 Bldr.generateNode(AE, *I, State, nullptr,
2153 ProgramPoint::PostStmtKind);
2154 }
2155
2156 getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2157 }
2158
2159 namespace {
2160 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2161 InvalidatedSymbols Symbols;
2162
2163 public:
2164 CollectReachableSymbolsCallback(ProgramStateRef State) {}
2165 const InvalidatedSymbols &getSymbols() const { return Symbols; }
2166
2167 bool VisitSymbol(SymbolRef Sym) override {
2168 Symbols.insert(Sym);
2169 return true;
2170 }
2171 };
2172 } // end anonymous namespace
2173
2174 // A value escapes in three possible cases:
2175 // (1) We are binding to something that is not a memory region.
2176 // (2) We are binding to a MemRegion that does not have stack storage.
2177 // (3) We are binding to a MemRegion with stack storage that the store
2178 // does not understand.
2179 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2180 SVal Loc,
2181 SVal Val,
2182 const LocationContext *LCtx) {
2183 // Are we storing to something that causes the value to "escape"?
2184 bool escapes = true;
2185
2186 // TODO: Move to StoreManager.
2187 if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
2188 escapes = !regionLoc->getRegion()->hasStackStorage();
2189
2190 if (!escapes) {
2191 // To test (3), generate a new state with the binding added. If it is
2192 // the same state, then it escapes (since the store cannot represent
2193 // the binding).
2194 // Do this only if we know that the store is not supposed to generate the
2195 // same state.
2196 SVal StoredVal = State->getSVal(regionLoc->getRegion());
2197 if (StoredVal != Val)
2198 escapes = (State == (State->bindLoc(*regionLoc, Val, LCtx)));
2199 }
2200 }
2201
2202 // If the store can represent the binding and the target has stack storage,
2203 // the value does not escape; just return and let the simulation state
2204 // continue as is.
2205 if (!escapes)
2206 return State;
2207
2208 // Otherwise, find all symbols referenced by 'val' that we are tracking
2209 // and stop tracking them.
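// (Illustrative example: after a bind such as 'globalList->next = p;', the
// symbol bound to 'p' has escaped to non-stack storage, so checkers must
// stop making assumptions about it.)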
2210 CollectReachableSymbolsCallback Scanner = 2211 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2212 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2213 State = getCheckerManager().runCheckersForPointerEscape(State, 2214 EscapedSymbols, 2215 /*CallEvent*/ nullptr, 2216 PSK_EscapeOnBind, 2217 nullptr); 2218 2219 return State; 2220 } 2221 2222 ProgramStateRef 2223 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2224 const InvalidatedSymbols *Invalidated, 2225 ArrayRef<const MemRegion *> ExplicitRegions, 2226 ArrayRef<const MemRegion *> Regions, 2227 const CallEvent *Call, 2228 RegionAndSymbolInvalidationTraits &ITraits) { 2229 2230 if (!Invalidated || Invalidated->empty()) 2231 return State; 2232 2233 if (!Call) 2234 return getCheckerManager().runCheckersForPointerEscape(State, 2235 *Invalidated, 2236 nullptr, 2237 PSK_EscapeOther, 2238 &ITraits); 2239 2240 // If the symbols were invalidated by a call, we want to find out which ones 2241 // were invalidated directly due to being arguments to the call. 2242 InvalidatedSymbols SymbolsDirectlyInvalidated; 2243 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2244 E = ExplicitRegions.end(); I != E; ++I) { 2245 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2246 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2247 } 2248 2249 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2250 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2251 E = Invalidated->end(); I!=E; ++I) { 2252 SymbolRef sym = *I; 2253 if (SymbolsDirectlyInvalidated.count(sym)) 2254 continue; 2255 SymbolsIndirectlyInvalidated.insert(sym); 2256 } 2257 2258 if (!SymbolsDirectlyInvalidated.empty()) 2259 State = getCheckerManager().runCheckersForPointerEscape(State, 2260 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2261 2262 // Notify about the symbols that get indirectly invalidated by the call. 2263 if (!SymbolsIndirectlyInvalidated.empty()) 2264 State = getCheckerManager().runCheckersForPointerEscape(State, 2265 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2266 2267 return State; 2268 } 2269 2270 /// evalBind - Handle the semantics of binding a value to a specific location. 2271 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2272 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2273 ExplodedNode *Pred, 2274 SVal location, SVal Val, 2275 bool atDeclInit, const ProgramPoint *PP) { 2276 2277 const LocationContext *LC = Pred->getLocationContext(); 2278 PostStmt PS(StoreE, LC); 2279 if (!PP) 2280 PP = &PS; 2281 2282 // Do a previsit of the bind. 2283 ExplodedNodeSet CheckedSet; 2284 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2285 StoreE, *this, *PP); 2286 2287 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2288 2289 // If the location is not a 'Loc', it will already be handled by 2290 // the checkers. There is nothing left to do. 
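// (For instance, an UnknownVal location cannot be bound in the store; we
// still run the escape-on-bind logic and emit a PostStore node below so the
// path continues.)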
2291 if (!location.getAs<Loc>()) { 2292 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2293 /*tag*/nullptr); 2294 ProgramStateRef state = Pred->getState(); 2295 state = processPointerEscapedOnBind(state, location, Val, LC); 2296 Bldr.generateNode(L, state, Pred); 2297 return; 2298 } 2299 2300 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2301 I!=E; ++I) { 2302 ExplodedNode *PredI = *I; 2303 ProgramStateRef state = PredI->getState(); 2304 2305 state = processPointerEscapedOnBind(state, location, Val, LC); 2306 2307 // When binding the value, pass on the hint that this is a initialization. 2308 // For initializations, we do not need to inform clients of region 2309 // changes. 2310 state = state->bindLoc(location.castAs<Loc>(), 2311 Val, LC, /* notifyChanges = */ !atDeclInit); 2312 2313 const MemRegion *LocReg = nullptr; 2314 if (Optional<loc::MemRegionVal> LocRegVal = 2315 location.getAs<loc::MemRegionVal>()) { 2316 LocReg = LocRegVal->getRegion(); 2317 } 2318 2319 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2320 Bldr.generateNode(L, state, PredI); 2321 } 2322 } 2323 2324 /// evalStore - Handle the semantics of a store via an assignment. 2325 /// @param Dst The node set to store generated state nodes 2326 /// @param AssignE The assignment expression if the store happens in an 2327 /// assignment. 2328 /// @param LocationE The location expression that is stored to. 2329 /// @param state The current simulation state 2330 /// @param location The location to store the value 2331 /// @param Val The value to be stored 2332 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2333 const Expr *LocationE, 2334 ExplodedNode *Pred, 2335 ProgramStateRef state, SVal location, SVal Val, 2336 const ProgramPointTag *tag) { 2337 // Proceed with the store. We use AssignE as the anchor for the PostStore 2338 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2339 const Expr *StoreE = AssignE ? AssignE : LocationE; 2340 2341 // Evaluate the location (checks for bad dereferences). 2342 ExplodedNodeSet Tmp; 2343 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2344 2345 if (Tmp.empty()) 2346 return; 2347 2348 if (location.isUndef()) 2349 return; 2350 2351 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2352 evalBind(Dst, StoreE, *NI, location, Val, false); 2353 } 2354 2355 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2356 const Expr *NodeEx, 2357 const Expr *BoundEx, 2358 ExplodedNode *Pred, 2359 ProgramStateRef state, 2360 SVal location, 2361 const ProgramPointTag *tag, 2362 QualType LoadTy) 2363 { 2364 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2365 2366 // Are we loading from a region? This actually results in two loads; one 2367 // to fetch the address of the referenced value and one to fetch the 2368 // referenced value. 2369 if (const TypedValueRegion *TR = 2370 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2371 2372 QualType ValTy = TR->getValueType(); 2373 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2374 static SimpleProgramPointTag 2375 loadReferenceTag(TagProviderName, "Load Reference"); 2376 ExplodedNodeSet Tmp; 2377 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2378 location, &loadReferenceTag, 2379 getContext().getPointerType(RT->getPointeeType())); 2380 2381 // Perform the load from the referenced value. 
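// (e.g., given a hypothetical 'int &ref = x; use(ref);', the load above
// produced the address of 'x'; the loop below performs the second load that
// fetches the value stored in 'x'.)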
2382 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end(); I!=E; ++I) {
2383 state = (*I)->getState();
2384 location = state->getSVal(BoundEx, (*I)->getLocationContext());
2385 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
2386 }
2387 return;
2388 }
2389 }
2390
2391 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
2392 }
2393
2394 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
2395 const Expr *NodeEx,
2396 const Expr *BoundEx,
2397 ExplodedNode *Pred,
2398 ProgramStateRef state,
2399 SVal location,
2400 const ProgramPointTag *tag,
2401 QualType LoadTy) {
2402 assert(NodeEx);
2403 assert(BoundEx);
2404 // Evaluate the location (checks for bad dereferences).
2405 ExplodedNodeSet Tmp;
2406 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
2407 if (Tmp.empty())
2408 return;
2409
2410 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
2411 if (location.isUndef())
2412 return;
2413
2414 // Proceed with the load.
2415 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) {
2416 state = (*NI)->getState();
2417 const LocationContext *LCtx = (*NI)->getLocationContext();
2418
2419 SVal V = UnknownVal();
2420 if (location.isValid()) {
2421 if (LoadTy.isNull())
2422 LoadTy = BoundEx->getType();
2423 V = state->getSVal(location.castAs<Loc>(), LoadTy);
2424 }
2425
2426 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
2427 ProgramPoint::PostLoadKind);
2428 }
2429 }
2430
2431 void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
2432 const Stmt *NodeEx,
2433 const Stmt *BoundEx,
2434 ExplodedNode *Pred,
2435 ProgramStateRef state,
2436 SVal location,
2437 const ProgramPointTag *tag,
2438 bool isLoad) {
2439 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
2440 // Early checks for performance reasons.
2441 if (location.isUnknown()) {
2442 return;
2443 }
2444
2445 ExplodedNodeSet Src;
2446 BldrTop.takeNodes(Pred);
2447 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
2448 if (Pred->getState() != state) {
2449 // Associate this new state with an ExplodedNode.
2450 // FIXME: If I pass null tag, the graph is incorrect, e.g. for
2451 // int *p;
2452 // p = 0;
2453 // *p = 0xDEADBEEF;
2454 // "p = 0" is not noted as "Null pointer value stored to 'p'" but
2455 // instead "int *p" is noted as
2456 // "Variable 'p' initialized to a null pointer value"
2457
2458 static SimpleProgramPointTag tag(TagProviderName, "Location");
2459 Bldr.generateNode(NodeEx, Pred, state, &tag);
2460 }
2461 ExplodedNodeSet Tmp;
2462 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
2463 NodeEx, BoundEx, *this);
2464 BldrTop.addNodes(Tmp);
2465 }
2466
2467 std::pair<const ProgramPointTag *, const ProgramPointTag*>
2468 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
2469 static SimpleProgramPointTag
2470 eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
2471 "Eagerly Assume True"),
2472 eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
2473 "Eagerly Assume False");
2474 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
2475 &eagerlyAssumeBinOpBifurcationFalse);
2476 }
2477
2478 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
2479 ExplodedNodeSet &Src,
2480 const Expr *Ex) {
2481 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
2482
2483 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
2484 ExplodedNode *Pred = *I;
2485 // Test if the previous node was at the same expression.
This can happen 2486 // when the expression fails to evaluate to anything meaningful and 2487 // (as an optimization) we don't generate a node. 2488 ProgramPoint P = Pred->getLocation(); 2489 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2490 continue; 2491 } 2492 2493 ProgramStateRef state = Pred->getState(); 2494 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2495 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2496 if (SEV && SEV->isExpression()) { 2497 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2498 geteagerlyAssumeBinOpBifurcationTags(); 2499 2500 ProgramStateRef StateTrue, StateFalse; 2501 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2502 2503 // First assume that the condition is true. 2504 if (StateTrue) { 2505 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2506 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2507 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2508 } 2509 2510 // Next, assume that the condition is false. 2511 if (StateFalse) { 2512 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2513 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2514 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2515 } 2516 } 2517 } 2518 } 2519 2520 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2521 ExplodedNodeSet &Dst) { 2522 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2523 // We have processed both the inputs and the outputs. All of the outputs 2524 // should evaluate to Locs. Nuke all of their values. 2525 2526 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2527 // which interprets the inline asm and stores proper results in the 2528 // outputs. 2529 2530 ProgramStateRef state = Pred->getState(); 2531 2532 for (const Expr *O : A->outputs()) { 2533 SVal X = state->getSVal(O, Pred->getLocationContext()); 2534 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2535 2536 if (Optional<Loc> LV = X.getAs<Loc>()) 2537 state = state->bindLoc(*LV, UnknownVal(), Pred->getLocationContext()); 2538 } 2539 2540 Bldr.generateNode(A, Pred, state); 2541 } 2542 2543 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2544 ExplodedNodeSet &Dst) { 2545 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2546 Bldr.generateNode(A, Pred, Pred->getState()); 2547 } 2548 2549 //===----------------------------------------------------------------------===// 2550 // Visualization. 2551 //===----------------------------------------------------------------------===// 2552 2553 #ifndef NDEBUG 2554 static ExprEngine* GraphPrintCheckerState; 2555 static SourceManager* GraphPrintSourceManager; 2556 2557 namespace llvm { 2558 template<> 2559 struct DOTGraphTraits<ExplodedNode*> : 2560 public DefaultDOTGraphTraits { 2561 2562 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2563 2564 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2565 // work. 2566 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2567 return ""; 2568 } 2569 2570 // De-duplicate some source location pretty-printing. 
2571 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2572 if (SLoc.isFileID()) { 2573 Out << "\\lline=" 2574 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2575 << " col=" 2576 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2577 << "\\l"; 2578 } 2579 } 2580 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2581 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2582 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2583 else 2584 SLoc.print(Out, *GraphPrintSourceManager); 2585 } 2586 2587 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2588 2589 std::string sbuf; 2590 llvm::raw_string_ostream Out(sbuf); 2591 2592 // Program Location. 2593 ProgramPoint Loc = N->getLocation(); 2594 2595 switch (Loc.getKind()) { 2596 case ProgramPoint::BlockEntranceKind: { 2597 Out << "Block Entrance: B" 2598 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2599 break; 2600 } 2601 2602 case ProgramPoint::BlockExitKind: 2603 assert (false); 2604 break; 2605 2606 case ProgramPoint::CallEnterKind: 2607 Out << "CallEnter"; 2608 break; 2609 2610 case ProgramPoint::CallExitBeginKind: 2611 Out << "CallExitBegin"; 2612 break; 2613 2614 case ProgramPoint::CallExitEndKind: 2615 Out << "CallExitEnd"; 2616 break; 2617 2618 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2619 Out << "PostStmtPurgeDeadSymbols"; 2620 break; 2621 2622 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2623 Out << "PreStmtPurgeDeadSymbols"; 2624 break; 2625 2626 case ProgramPoint::EpsilonKind: 2627 Out << "Epsilon Point"; 2628 break; 2629 2630 case ProgramPoint::PreImplicitCallKind: { 2631 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2632 Out << "PreCall: "; 2633 2634 // FIXME: Get proper printing options. 2635 PC.getDecl()->print(Out, LangOptions()); 2636 printLocation(Out, PC.getLocation()); 2637 break; 2638 } 2639 2640 case ProgramPoint::PostImplicitCallKind: { 2641 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2642 Out << "PostCall: "; 2643 2644 // FIXME: Get proper printing options. 2645 PC.getDecl()->print(Out, LangOptions()); 2646 printLocation(Out, PC.getLocation()); 2647 break; 2648 } 2649 2650 case ProgramPoint::PostInitializerKind: { 2651 Out << "PostInitializer: "; 2652 const CXXCtorInitializer *Init = 2653 Loc.castAs<PostInitializer>().getInitializer(); 2654 if (const FieldDecl *FD = Init->getAnyMember()) 2655 Out << *FD; 2656 else { 2657 QualType Ty = Init->getTypeSourceInfo()->getType(); 2658 Ty = Ty.getLocalUnqualifiedType(); 2659 LangOptions LO; // FIXME. 2660 Ty.print(Out, LO); 2661 } 2662 break; 2663 } 2664 2665 case ProgramPoint::BlockEdgeKind: { 2666 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2667 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2668 << E.getDst()->getBlockID() << ')'; 2669 2670 if (const Stmt *T = E.getSrc()->getTerminator()) { 2671 SourceLocation SLoc = T->getLocStart(); 2672 2673 Out << "\\|Terminator: "; 2674 LangOptions LO; // FIXME. 2675 E.getSrc()->printTerminator(Out, LO); 2676 2677 if (SLoc.isFileID()) { 2678 Out << "\\lline=" 2679 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2680 << " col=" 2681 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2682 } 2683 2684 if (isa<SwitchStmt>(T)) { 2685 const Stmt *Label = E.getDst()->getLabel(); 2686 2687 if (Label) { 2688 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2689 Out << "\\lcase "; 2690 LangOptions LO; // FIXME. 
2691 if (C->getLHS()) 2692 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2693 2694 if (const Stmt *RHS = C->getRHS()) { 2695 Out << " .. "; 2696 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2697 } 2698 2699 Out << ":"; 2700 } 2701 else { 2702 assert (isa<DefaultStmt>(Label)); 2703 Out << "\\ldefault:"; 2704 } 2705 } 2706 else 2707 Out << "\\l(implicit) default:"; 2708 } 2709 else if (isa<IndirectGotoStmt>(T)) { 2710 // FIXME 2711 } 2712 else { 2713 Out << "\\lCondition: "; 2714 if (*E.getSrc()->succ_begin() == E.getDst()) 2715 Out << "true"; 2716 else 2717 Out << "false"; 2718 } 2719 2720 Out << "\\l"; 2721 } 2722 2723 break; 2724 } 2725 2726 default: { 2727 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2728 assert(S != nullptr && "Expecting non-null Stmt"); 2729 2730 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2731 LangOptions LO; // FIXME. 2732 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2733 printLocation(Out, S->getLocStart()); 2734 2735 if (Loc.getAs<PreStmt>()) 2736 Out << "\\lPreStmt\\l;"; 2737 else if (Loc.getAs<PostLoad>()) 2738 Out << "\\lPostLoad\\l;"; 2739 else if (Loc.getAs<PostStore>()) 2740 Out << "\\lPostStore\\l"; 2741 else if (Loc.getAs<PostLValue>()) 2742 Out << "\\lPostLValue\\l"; 2743 2744 break; 2745 } 2746 } 2747 2748 ProgramStateRef state = N->getState(); 2749 Out << "\\|StateID: " << (const void*) state.get() 2750 << " NodeID: " << (const void*) N << "\\|"; 2751 2752 // Analysis stack backtrace. 2753 Out << "Location context stack (from current to outer):\\l"; 2754 const LocationContext *LC = Loc.getLocationContext(); 2755 unsigned Idx = 0; 2756 for (; LC; LC = LC->getParent(), ++Idx) { 2757 Out << Idx << ". (" << (const void *)LC << ") "; 2758 switch (LC->getKind()) { 2759 case LocationContext::StackFrame: 2760 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2761 Out << "Calling " << D->getQualifiedNameAsString(); 2762 else 2763 Out << "Calling anonymous code"; 2764 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2765 Out << " at "; 2766 printLocation2(Out, S->getLocStart()); 2767 } 2768 break; 2769 case LocationContext::Block: 2770 Out << "Invoking block"; 2771 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2772 Out << " defined at "; 2773 printLocation2(Out, D->getLocStart()); 2774 } 2775 break; 2776 case LocationContext::Scope: 2777 Out << "Entering scope"; 2778 // FIXME: Add more info once ScopeContext is activated. 2779 break; 2780 } 2781 Out << "\\l"; 2782 } 2783 Out << "\\l"; 2784 2785 state->printDOT(Out); 2786 2787 Out << "\\l"; 2788 2789 if (const ProgramPointTag *tag = Loc.getTag()) { 2790 Out << "\\|Tag: " << tag->getTagDescription(); 2791 Out << "\\l"; 2792 } 2793 return Out.str(); 2794 } 2795 }; 2796 } // end llvm namespace 2797 #endif 2798 2799 void ExprEngine::ViewGraph(bool trim) { 2800 #ifndef NDEBUG 2801 if (trim) { 2802 std::vector<const ExplodedNode*> Src; 2803 2804 // Flush any outstanding reports to make sure we cover all the nodes. 2805 // This does not cause them to get displayed. 2806 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2807 const_cast<BugType*>(*I)->FlushReports(BR); 2808 2809 // Iterate through the reports and get their nodes. 
2810 for (BugReporter::EQClasses_iterator 2811 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2812 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2813 if (N) Src.push_back(N); 2814 } 2815 2816 ViewGraph(Src); 2817 } 2818 else { 2819 GraphPrintCheckerState = this; 2820 GraphPrintSourceManager = &getContext().getSourceManager(); 2821 2822 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2823 2824 GraphPrintCheckerState = nullptr; 2825 GraphPrintSourceManager = nullptr; 2826 } 2827 #endif 2828 } 2829 2830 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2831 #ifndef NDEBUG 2832 GraphPrintCheckerState = this; 2833 GraphPrintSourceManager = &getContext().getSourceManager(); 2834 2835 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2836 2837 if (!TrimmedG.get()) 2838 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2839 else 2840 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2841 2842 GraphPrintCheckerState = nullptr; 2843 GraphPrintSourceManager = nullptr; 2844 #endif 2845 } 2846