//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext ensures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *InitWithAdjustments,
                                          const Expr *Result) {
  // FIXME: This function is a hack that works around the quirky AST
  // we often have with respect to C++ temporaries. If only we modelled
  // the actual execution order of statements properly in the CFG,
  // all the hassle with adjustments would not be necessary,
  // and perhaps the whole function would be removed.
  SVal InitValWithAdjustments = State->getSVal(InitWithAdjustments, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!InitValWithAdjustments.getAs<NonLoc>())
      return State;
    Result = InitWithAdjustments;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!InitValWithAdjustments.getAs<Loc>() ||
           Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // MaterializeTemporaryExpr may appear out of place, after a few field and
  // base-class accesses have been made to the object, even though semantically
  // it is the whole object that gets materialized and lifetime-extended.
  //
  // For example:
  //
  //   `-MaterializeTemporaryExpr
  //     `-MemberExpr
  //       `-CXXTemporaryObjectExpr
  //
  // instead of the more natural
  //
  //   `-MemberExpr
  //     `-MaterializeTemporaryExpr
  //       `-CXXTemporaryObjectExpr
  //
  // Use the usual methods for obtaining the expression of the base object,
  // and record the adjustments that we need to make to obtain the sub-object
  // that the whole expression 'Ex' refers to. This is the usual trick,
  // in the sense that CodeGen takes a similar route.

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;

  const Expr *Init = InitWithAdjustments->skipRValueSubobjectAdjustments(
      CommaLHSs, Adjustments);

  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Init, LC);

  SVal Reg = loc::MemRegionVal(TR);
  SVal BaseReg = Reg;

  // Make the necessary adjustments to obtain the sub-object.
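  // Illustrative example (not tied to any particular analyzed code): for the
  // AST sketched above, skipRValueSubobjectAdjustments() records roughly a
  // single FieldAdjustment for the MemberExpr, so the loop below turns 'Reg'
  // (initially the whole temporary region TR) into the corresponding field
  // sub-region, while 'BaseReg' keeps referring to the complete object.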
  for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
    const SubobjectAdjustment &Adj = *I;
    switch (Adj.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
      break;
    case SubobjectAdjustment::FieldAdjustment:
      Reg = StoreMgr.getLValueField(Adj.Field, Reg);
      break;
    case SubobjectAdjustment::MemberPointerAdjustment:
      // FIXME: Unimplemented.
      State = State->bindDefault(Reg, UnknownVal(), LC);
      return State;
    }
  }

  // What remains is to copy the value of the object to the new region.
  // FIXME: In other words, what we should always do is copy the value of the
  // Init expression (which corresponds to the bigger object) to the whole
  // temporary region TR. However, this value is often no longer present
  // in the Environment. If it has disappeared, we instead invalidate TR.
  // Still, what we can do is assign the value of expression Ex (which
  // corresponds to the sub-object) to the TR's sub-region Reg. At least,
  // values inside Reg would be correct.
  SVal InitVal = State->getSVal(Init, LC);
  if (InitVal.isUnknown()) {
    InitVal = getSValBuilder().conjureSymbolVal(Result, LC, Init->getType(),
                                                currBldrCtx->blockCount());
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);

    // Then we'd need to take the value that certainly exists and bind it over.
    if (InitValWithAdjustments.isUnknown()) {
      // Try to recover some path sensitivity in case we couldn't
      // compute the value.
      InitValWithAdjustments = getSValBuilder().conjureSymbolVal(
          Result, LC, InitWithAdjustments->getType(),
          currBldrCtx->blockCount());
    }
    State =
        State->bindLoc(Reg.castAs<Loc>(), InitValWithAdjustments, LC, false);
  } else {
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);
  }

  // The result expression would now point to the correct sub-region of the
  // newly created temporary region. Do this last so that getSVal of Init is
  // still computed correctly in case (Result == Init).
  State = State->BindExpr(Result, LC, Reg);

  // Notify checkers once for two bindLoc()s.
  State = processRegionChange(State, TR, LC);

  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const LocationContext *LCtx,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         LCtx, Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live.
  // (If this is the top-level stack frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary.
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (Init->getType()->isArrayType()) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (!Field->getType()->isReferenceType())
          if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
            InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (varType->isReferenceType()) {
    const MemRegion *ValueRegion = state->getSVal(Region).getAsRegion();
    if (!ValueRegion) {
      // FIXME: This should not happen. The language guarantees the presence
      // of a valid initializer here, so the reference shall not be undefined.
      // It seems that we're calling destructors over variables that
      // were not initialized yet.
      return;
    }
    Region = ValueRegion->getBaseRegion();
    varType = cast<TypedValueRegion>(Region)->getValueType();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run the destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++, OpenMP and ARC stuff we don't support yet.
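    // Each of these constructs is conservatively treated as a stopping point:
    // a sink node is generated and the enclosing CFG block is recorded as
    // aborted, so paths that reach them are not explored any further.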
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::DependentCoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeSimdDirectiveClass:
    case Stmt::CapturedStmtClass:
    {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet, but which we will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ArrayInitLoopExprClass:
    case Stmt::ArrayInitIndexExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
      LLVM_FALLTHROUGH;
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExpr as explicit control flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, MTE, *this);
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        CreateCXXTemporaryObject(MTE, *i, dstExpr);
      }
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, MTE, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
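      // Generating a sink here ends exploration of the current path;
      // unwinding to an enclosing @catch/try handler is not modeled.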
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF =
      CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
1549 //===----------------------------------------------------------------------===// 1550 1551 /// RecoverCastedSymbol - A helper function for ProcessBranch that is used 1552 /// to try to recover some path-sensitivity for casts of symbolic 1553 /// integers that promote their values (which are currently not tracked well). 1554 /// This function returns the SVal bound to Condition->IgnoreCasts if all the 1555 // cast(s) did was sign-extend the original value. 1556 static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr, 1557 ProgramStateRef state, 1558 const Stmt *Condition, 1559 const LocationContext *LCtx, 1560 ASTContext &Ctx) { 1561 1562 const Expr *Ex = dyn_cast<Expr>(Condition); 1563 if (!Ex) 1564 return UnknownVal(); 1565 1566 uint64_t bits = 0; 1567 bool bitsInit = false; 1568 1569 while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) { 1570 QualType T = CE->getType(); 1571 1572 if (!T->isIntegralOrEnumerationType()) 1573 return UnknownVal(); 1574 1575 uint64_t newBits = Ctx.getTypeSize(T); 1576 if (!bitsInit || newBits < bits) { 1577 bitsInit = true; 1578 bits = newBits; 1579 } 1580 1581 Ex = CE->getSubExpr(); 1582 } 1583 1584 // We reached a non-cast. Is it a symbolic value? 1585 QualType T = Ex->getType(); 1586 1587 if (!bitsInit || !T->isIntegralOrEnumerationType() || 1588 Ctx.getTypeSize(T) > bits) 1589 return UnknownVal(); 1590 1591 return state->getSVal(Ex, LCtx); 1592 } 1593 1594 #ifndef NDEBUG 1595 static const Stmt *getRightmostLeaf(const Stmt *Condition) { 1596 while (Condition) { 1597 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition); 1598 if (!BO || !BO->isLogicalOp()) { 1599 return Condition; 1600 } 1601 Condition = BO->getRHS()->IgnoreParens(); 1602 } 1603 return nullptr; 1604 } 1605 #endif 1606 1607 // Returns the condition the branch at the end of 'B' depends on and whose value 1608 // has been evaluated within 'B'. 1609 // In most cases, the terminator condition of 'B' will be evaluated fully in 1610 // the last statement of 'B'; in those cases, the resolved condition is the 1611 // given 'Condition'. 1612 // If the condition of the branch is a logical binary operator tree, the CFG is 1613 // optimized: in that case, we know that the expression formed by all but the 1614 // rightmost leaf of the logical binary operator tree must be true, and thus 1615 // the branch condition is at this point equivalent to the truth value of that 1616 // rightmost leaf; the CFG block thus only evaluates this rightmost leaf 1617 // expression in its final statement. As the full condition in that case was 1618 // not evaluated, and is thus not in the SVal cache, we need to use that leaf 1619 // expression to evaluate the truth value of the condition in the current state 1620 // space. 1621 static const Stmt *ResolveCondition(const Stmt *Condition, 1622 const CFGBlock *B) { 1623 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1624 Condition = Ex->IgnoreParens(); 1625 1626 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition); 1627 if (!BO || !BO->isLogicalOp()) 1628 return Condition; 1629 1630 assert(!B->getTerminator().isTemporaryDtorsBranch() && 1631 "Temporary destructor branches handled by processBindTemporary."); 1632 1633 // For logical operations, we still have the case where some branches 1634 // use the traditional "merge" approach and others sink the branch 1635 // directly into the basic blocks representing the logical operation. 1636 // We need to distinguish between those two cases here. 
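  // Illustrative example: when the branch is sunk into the blocks for a
  // condition like "a && b", the block feeding this branch evaluates only the
  // rightmost leaf "b"; control can only have reached it if "a" was already
  // true, so the whole condition reduces to the value of "b" here.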
1637 1638 // The invariants are still shifting, but it is possible that the 1639 // last element in a CFGBlock is not a CFGStmt. Look for the last 1640 // CFGStmt as the value of the condition. 1641 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1642 for (; I != E; ++I) { 1643 CFGElement Elem = *I; 1644 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1645 if (!CS) 1646 continue; 1647 const Stmt *LastStmt = CS->getStmt(); 1648 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1649 return LastStmt; 1650 } 1651 llvm_unreachable("could not resolve condition"); 1652 } 1653 1654 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1655 NodeBuilderContext& BldCtx, 1656 ExplodedNode *Pred, 1657 ExplodedNodeSet &Dst, 1658 const CFGBlock *DstT, 1659 const CFGBlock *DstF) { 1660 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1661 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1662 const LocationContext *LCtx = Pred->getLocationContext(); 1663 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1664 currBldrCtx = &BldCtx; 1665 1666 // Check for NULL conditions; e.g. "for(;;)" 1667 if (!Condition) { 1668 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1669 NullCondBldr.markInfeasible(false); 1670 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1671 return; 1672 } 1673 1674 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1675 Condition = Ex->IgnoreParens(); 1676 1677 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1678 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1679 Condition->getLocStart(), 1680 "Error evaluating branch"); 1681 1682 ExplodedNodeSet CheckersOutSet; 1683 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1684 Pred, *this); 1685 // We generated only sinks. 1686 if (CheckersOutSet.empty()) 1687 return; 1688 1689 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1690 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1691 E = CheckersOutSet.end(); E != I; ++I) { 1692 ExplodedNode *PredI = *I; 1693 1694 if (PredI->isSink()) 1695 continue; 1696 1697 ProgramStateRef PrevState = PredI->getState(); 1698 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1699 1700 if (X.isUnknownOrUndef()) { 1701 // Give it a chance to recover from unknown. 1702 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1703 if (Ex->getType()->isIntegralOrEnumerationType()) { 1704 // Try to recover some path-sensitivity. Right now casts of symbolic 1705 // integers that promote their values are currently not tracked well. 1706 // If 'Condition' is such an expression, try and recover the 1707 // underlying value and use that instead. 1708 SVal recovered = RecoverCastedSymbol(getStateManager(), 1709 PrevState, Condition, 1710 PredI->getLocationContext(), 1711 getContext()); 1712 1713 if (!recovered.isUnknown()) { 1714 X = recovered; 1715 } 1716 } 1717 } 1718 } 1719 1720 // If the condition is still unknown, give up. 1721 if (X.isUnknownOrUndef()) { 1722 builder.generateNode(PrevState, true, PredI); 1723 builder.generateNode(PrevState, false, PredI); 1724 continue; 1725 } 1726 1727 DefinedSVal V = X.castAs<DefinedSVal>(); 1728 1729 ProgramStateRef StTrue, StFalse; 1730 std::tie(StTrue, StFalse) = PrevState->assume(V); 1731 1732 // Process the true branch. 
1733 if (builder.isFeasible(true)) { 1734 if (StTrue) 1735 builder.generateNode(StTrue, true, PredI); 1736 else 1737 builder.markInfeasible(true); 1738 } 1739 1740 // Process the false branch. 1741 if (builder.isFeasible(false)) { 1742 if (StFalse) 1743 builder.generateNode(StFalse, false, PredI); 1744 else 1745 builder.markInfeasible(false); 1746 } 1747 } 1748 currBldrCtx = nullptr; 1749 } 1750 1751 /// The GDM component containing the set of global variables which have been 1752 /// previously initialized with explicit initializers. 1753 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1754 llvm::ImmutableSet<const VarDecl *>) 1755 1756 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1757 NodeBuilderContext &BuilderCtx, 1758 ExplodedNode *Pred, 1759 clang::ento::ExplodedNodeSet &Dst, 1760 const CFGBlock *DstT, 1761 const CFGBlock *DstF) { 1762 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1763 currBldrCtx = &BuilderCtx; 1764 1765 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1766 ProgramStateRef state = Pred->getState(); 1767 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1768 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1769 1770 if (!initHasRun) { 1771 state = state->add<InitializedGlobalsSet>(VD); 1772 } 1773 1774 builder.generateNode(state, initHasRun, Pred); 1775 builder.markInfeasible(!initHasRun); 1776 1777 currBldrCtx = nullptr; 1778 } 1779 1780 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1781 /// nodes by processing the 'effects' of a computed goto jump. 1782 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1783 1784 ProgramStateRef state = builder.getState(); 1785 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1786 1787 // Three possibilities: 1788 // 1789 // (1) We know the computed label. 1790 // (2) The label is NULL (or some other constant), or Undefined. 1791 // (3) We have no clue about the label. Dispatch to all targets. 1792 // 1793 1794 typedef IndirectGotoNodeBuilder::iterator iterator; 1795 1796 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1797 const LabelDecl *L = LV->getLabel(); 1798 1799 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1800 if (I.getLabel() == L) { 1801 builder.generateNode(I, state); 1802 return; 1803 } 1804 } 1805 1806 llvm_unreachable("No block with label."); 1807 } 1808 1809 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1810 // Dispatch to the first target and mark it as a sink. 1811 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1812 // FIXME: add checker visit. 1813 // UndefBranches.insert(N); 1814 return; 1815 } 1816 1817 // This is really a catch-all. We don't support symbolics yet. 1818 // FIXME: Implement dispatch for symbolic pointers. 
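  // Illustrative example: for "goto *targets[i];" with a symbolic index 'i',
  // the target label is unknown, so we conservatively enqueue every label in
  // the dispatch list below.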
1819 
1820   for (iterator I=builder.begin(), E=builder.end(); I != E; ++I)
1821     builder.generateNode(I, state);
1822 }
1823 
1824 #if 0
1825 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) {
1826   const StackFrameContext* Frame = Pred.getStackFrame();
1827   const llvm::ImmutableSet<CXXBindTemporaryContext> &Set =
1828       Pred.getState()->get<InitializedTemporariesSet>();
1829   return std::find_if(Set.begin(), Set.end(),
1830                       [&](const CXXBindTemporaryContext &Ctx) {
1831                         if (Ctx.second == Frame) {
1832                           Ctx.first->dump();
1833                           llvm::errs() << "\n";
1834                         }
1835                         return Ctx.second == Frame;
1836                       }) == Set.end();
1837 }
1838 #endif
1839 
1840 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC,
1841                                         ExplodedNode *Pred,
1842                                         ExplodedNodeSet &Dst,
1843                                         const BlockEdge &L) {
1844   SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC);
1845   getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this);
1846 }
1847 
1848 /// processEndOfFunction - Called by CoreEngine. Used to generate end-of-path
1849 /// nodes when control reaches the end of a function.
1850 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
1851                                       ExplodedNode *Pred,
1852                                       const ReturnStmt *RS) {
1853   // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred).
1854   // We currently cannot enable this assert, as lifetime-extended temporaries
1855   // are not modelled correctly.
1856   PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
1857   StateMgr.EndPath(Pred->getState());
1858 
1859   ExplodedNodeSet Dst;
1860   if (Pred->getLocationContext()->inTopFrame()) {
1861     // Remove dead symbols.
1862     ExplodedNodeSet AfterRemovedDead;
1863     removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);
1864 
1865     // Notify checkers.
1866     for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
1867          E = AfterRemovedDead.end(); I != E; ++I) {
1868       getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
1869     }
1870   } else {
1871     getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
1872   }
1873 
1874   Engine.enqueueEndOfFunction(Dst, RS);
1875 }
1876 
1877 /// processSwitch - Called by CoreEngine. Used to generate successor
1878 /// nodes by processing the 'effects' of a switch statement.
1879 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
1880   typedef SwitchNodeBuilder::iterator iterator;
1881   ProgramStateRef state = builder.getState();
1882   const Expr *CondE = builder.getCondition();
1883   SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());
1884 
1885   if (CondV_untested.isUndef()) {
1886     //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
1887     // FIXME: add checker
1888     //UndefBranches.insert(N);
1889 
1890     return;
1891   }
1892   DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();
1893 
1894   ProgramStateRef DefaultSt = state;
1895 
1896   iterator I = builder.begin(), EI = builder.end();
1897   bool defaultIsFeasible = I == EI;
1898 
1899   for ( ; I != EI; ++I) {
1900     // Successor may be pruned out during CFG construction.
1901     if (!I.getBlock())
1902       continue;
1903 
1904     const CaseStmt *Case = I.getCase();
1905 
1906     // Evaluate the LHS of the case value.
1907     llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
1908     assert(V1.getBitWidth() == getContext().getIntWidth(CondE->getType()));
1909 
1910     // Get the RHS of the case, if it exists.
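    // (A RHS is present only for GNU case ranges such as "case 1 ... 5:"; for
    // a plain "case 1:" the range below degenerates to [V1, V1].)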
1911 llvm::APSInt V2; 1912 if (const Expr *E = Case->getRHS()) 1913 V2 = E->EvaluateKnownConstInt(getContext()); 1914 else 1915 V2 = V1; 1916 1917 ProgramStateRef StateCase; 1918 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1919 std::tie(StateCase, DefaultSt) = 1920 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1921 else // UnknownVal 1922 StateCase = DefaultSt; 1923 1924 if (StateCase) 1925 builder.generateCaseStmtNode(I, StateCase); 1926 1927 // Now "assume" that the case doesn't match. Add this state 1928 // to the default state (if it is feasible). 1929 if (DefaultSt) 1930 defaultIsFeasible = true; 1931 else { 1932 defaultIsFeasible = false; 1933 break; 1934 } 1935 } 1936 1937 if (!defaultIsFeasible) 1938 return; 1939 1940 // If we have switch(enum value), the default branch is not 1941 // feasible if all of the enum constants not covered by 'case:' statements 1942 // are not feasible values for the switch condition. 1943 // 1944 // Note that this isn't as accurate as it could be. Even if there isn't 1945 // a case for a particular enum value as long as that enum value isn't 1946 // feasible then it shouldn't be considered for making 'default:' reachable. 1947 const SwitchStmt *SS = builder.getSwitch(); 1948 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1949 if (CondExpr->getType()->getAs<EnumType>()) { 1950 if (SS->isAllEnumCasesCovered()) 1951 return; 1952 } 1953 1954 builder.generateDefaultCaseNode(DefaultSt); 1955 } 1956 1957 //===----------------------------------------------------------------------===// 1958 // Transfer functions: Loads and stores. 1959 //===----------------------------------------------------------------------===// 1960 1961 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1962 ExplodedNode *Pred, 1963 ExplodedNodeSet &Dst) { 1964 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1965 1966 ProgramStateRef state = Pred->getState(); 1967 const LocationContext *LCtx = Pred->getLocationContext(); 1968 1969 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1970 // C permits "extern void v", and if you cast the address to a valid type, 1971 // you can even do things with it. We simply pretend 1972 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1973 const LocationContext *LocCtxt = Pred->getLocationContext(); 1974 const Decl *D = LocCtxt->getDecl(); 1975 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1976 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1977 SVal V; 1978 bool IsReference; 1979 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1980 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1981 MD->getParent()->isLambda()) { 1982 // Lookup the field of the lambda. 1983 const CXXRecordDecl *CXXRec = MD->getParent(); 1984 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1985 FieldDecl *LambdaThisCaptureField; 1986 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1987 const FieldDecl *FD = LambdaCaptureFields[VD]; 1988 if (!FD) { 1989 // When a constant is captured, sometimes no corresponding field is 1990 // created in the lambda object. 
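        // Illustrative example: given "const int n = 4;", a lambda that only
        // reads 'n' in a constant expression (e.g. "[=] { return n; }") need
        // not materialize a capture field, so we fall back to the variable's
        // own lvalue below.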
1991 assert(VD->getType().isConstQualified()); 1992 V = state->getLValue(VD, LocCtxt); 1993 IsReference = false; 1994 } else { 1995 Loc CXXThis = 1996 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1997 SVal CXXThisVal = state->getSVal(CXXThis); 1998 V = state->getLValue(FD, CXXThisVal); 1999 IsReference = FD->getType()->isReferenceType(); 2000 } 2001 } else { 2002 V = state->getLValue(VD, LocCtxt); 2003 IsReference = VD->getType()->isReferenceType(); 2004 } 2005 2006 // For references, the 'lvalue' is the pointer address stored in the 2007 // reference region. 2008 if (IsReference) { 2009 if (const MemRegion *R = V.getAsRegion()) 2010 V = state->getSVal(R); 2011 else 2012 V = UnknownVal(); 2013 } 2014 2015 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2016 ProgramPoint::PostLValueKind); 2017 return; 2018 } 2019 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 2020 assert(!Ex->isGLValue()); 2021 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 2022 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 2023 return; 2024 } 2025 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 2026 SVal V = svalBuilder.getFunctionPointer(FD); 2027 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2028 ProgramPoint::PostLValueKind); 2029 return; 2030 } 2031 if (isa<FieldDecl>(D)) { 2032 // FIXME: Compute lvalue of field pointers-to-member. 2033 // Right now we just use a non-null void pointer, so that it gives proper 2034 // results in boolean contexts. 2035 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 2036 currBldrCtx->blockCount()); 2037 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 2038 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2039 ProgramPoint::PostLValueKind); 2040 return; 2041 } 2042 2043 llvm_unreachable("Support for this Decl not implemented."); 2044 } 2045 2046 /// VisitArraySubscriptExpr - Transfer function for array accesses 2047 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 2048 ExplodedNode *Pred, 2049 ExplodedNodeSet &Dst){ 2050 2051 const Expr *Base = A->getBase()->IgnoreParens(); 2052 const Expr *Idx = A->getIdx()->IgnoreParens(); 2053 2054 ExplodedNodeSet CheckerPreStmt; 2055 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 2056 2057 ExplodedNodeSet EvalSet; 2058 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 2059 assert(A->isGLValue() || 2060 (!AMgr.getLangOpts().CPlusPlus && 2061 A->getType().isCForbiddenLValueType())); 2062 2063 for (auto *Node : CheckerPreStmt) { 2064 const LocationContext *LCtx = Node->getLocationContext(); 2065 ProgramStateRef state = Node->getState(); 2066 SVal V = state->getLValue(A->getType(), 2067 state->getSVal(Idx, LCtx), 2068 state->getSVal(Base, LCtx)); 2069 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 2070 ProgramPoint::PostLValueKind); 2071 } 2072 2073 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 2074 } 2075 2076 /// VisitMemberExpr - Transfer function for member expressions. 2077 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2078 ExplodedNodeSet &Dst) { 2079 2080 // FIXME: Prechecks eventually go in ::Visit(). 
2081   ExplodedNodeSet CheckedSet;
2082   getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);
2083 
2084   ExplodedNodeSet EvalSet;
2085   ValueDecl *Member = M->getMemberDecl();
2086 
2087   // Handle static member variables and enum constants accessed via
2088   // member syntax.
2089   if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
2091     for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2092          I != E; ++I) {
2093       VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
2094     }
2095   } else {
2096     StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
2097     ExplodedNodeSet Tmp;
2098 
2099     for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
2100          I != E; ++I) {
2101       ProgramStateRef state = (*I)->getState();
2102       const LocationContext *LCtx = (*I)->getLocationContext();
2103       Expr *BaseExpr = M->getBase();
2104 
2105       // Handle C++ method calls.
2106       if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
2107         if (MD->isInstance())
2108           state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2109 
2110         SVal MDVal = svalBuilder.getFunctionPointer(MD);
2111         state = state->BindExpr(M, LCtx, MDVal);
2112 
2113         Bldr.generateNode(M, *I, state);
2114         continue;
2115       }
2116 
2117       // Handle regular struct fields / member variables.
2118       state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
2119       SVal baseExprVal = state->getSVal(BaseExpr, LCtx);
2120 
2121       FieldDecl *field = cast<FieldDecl>(Member);
2122       SVal L = state->getLValue(field, baseExprVal);
2123 
2124       if (M->isGLValue() || M->getType()->isArrayType()) {
2125         // We special-case rvalues of array type because the analyzer cannot
2126         // reason about them, since we expect all regions to be wrapped in Locs.
2127         // We instead treat these as lvalues and assume that they will decay to
2128         // pointers as soon as they are used.
2129         if (!M->isGLValue()) {
2130           assert(M->getType()->isArrayType());
2131           const ImplicitCastExpr *PE =
2132             dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M));
2133           if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
2134             llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
2135           }
2136         }
2137 
2138         if (field->getType()->isReferenceType()) {
2139           if (const MemRegion *R = L.getAsRegion())
2140             L = state->getSVal(R);
2141           else
2142             L = UnknownVal();
2143         }
2144 
2145         Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
2146                           ProgramPoint::PostLValueKind);
2147       } else {
2148         Bldr.takeNodes(*I);
2149         evalLoad(Tmp, M, M, *I, state, L);
2150         Bldr.addNodes(Tmp);
2151       }
2152     }
2153   }
2154 
2155   getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
2156 }
2157 
2158 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
2159                                  ExplodedNodeSet &Dst) {
2160   ExplodedNodeSet AfterPreSet;
2161   getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
2162 
2163   // For now, treat all the arguments to C11 atomics as escaping.
2164   // FIXME: Ideally we should model the behavior of the atomics precisely here.
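  // Illustrative example: for "atomic_store(&obj, val)" both the object
  // pointer and the stored value are treated as escaped, and the regions
  // reachable from them are invalidated below.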
2165 
2166   ExplodedNodeSet AfterInvalidateSet;
2167   StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2168 
2169   for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2170        I != E; ++I) {
2171     ProgramStateRef State = (*I)->getState();
2172     const LocationContext *LCtx = (*I)->getLocationContext();
2173 
2174     SmallVector<SVal, 8> ValuesToInvalidate;
2175     for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2176       const Expr *SubExpr = AE->getSubExprs()[SI];
2177       SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2178       ValuesToInvalidate.push_back(SubExprVal);
2179     }
2180 
2181     State = State->invalidateRegions(ValuesToInvalidate, AE,
2182                                      currBldrCtx->blockCount(),
2183                                      LCtx,
2184                                      /*CausedByPointerEscape*/true,
2185                                      /*Symbols=*/nullptr);
2186 
2187     SVal ResultVal = UnknownVal();
2188     State = State->BindExpr(AE, LCtx, ResultVal);
2189     Bldr.generateNode(AE, *I, State, nullptr,
2190                       ProgramPoint::PostStmtKind);
2191   }
2192 
2193   getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2194 }
2195 
2196 namespace {
2197 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2198   InvalidatedSymbols Symbols;
2199 
2200 public:
2201   CollectReachableSymbolsCallback(ProgramStateRef State) {}
2202   const InvalidatedSymbols &getSymbols() const { return Symbols; }
2203 
2204   bool VisitSymbol(SymbolRef Sym) override {
2205     Symbols.insert(Sym);
2206     return true;
2207   }
2208 };
2209 } // end anonymous namespace
2210 
2211 // A value escapes in three possible cases:
2212 // (1) We are binding to something that is not a memory region.
2213 // (2) We are binding to a MemRegion that does not have stack storage.
2214 // (3) We are binding to a MemRegion with stack storage that the store
2215 //     does not understand.
2216 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2217                                                         SVal Loc,
2218                                                         SVal Val,
2219                                                         const LocationContext *LCtx) {
2220   // Are we storing to something that causes the value to "escape"?
2221   bool escapes = true;
2222 
2223   // TODO: Move to StoreManager.
2224   if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
2225     escapes = !regionLoc->getRegion()->hasStackStorage();
2226 
2227     if (!escapes) {
2228       // To test (3), generate a new state with the binding added. If it is
2229       // the same state, then it escapes (since the store cannot represent
2230       // the binding).
2231       // Do this only if we know that the store is not supposed to generate the
2232       // same state.
2233       SVal StoredVal = State->getSVal(regionLoc->getRegion());
2234       if (StoredVal != Val)
2235         escapes = (State == (State->bindLoc(*regionLoc, Val, LCtx)));
2236     }
2237   }
2238 
2239   // If our store can represent the binding and we are storing to something
2240   // that has local storage, just return and have the simulation state
2241   // continue as is.
2242   if (!escapes)
2243     return State;
2244 
2245   // Otherwise, find all symbols referenced by 'val' that we are tracking
2246   // and stop tracking them.
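  // Illustrative example: after "globalPtr = localPtr;", the bound value is
  // now reachable from a global region, so every symbol reachable from it is
  // reported to the checkers as escaping on the bind.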
2247 CollectReachableSymbolsCallback Scanner = 2248 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2249 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2250 State = getCheckerManager().runCheckersForPointerEscape(State, 2251 EscapedSymbols, 2252 /*CallEvent*/ nullptr, 2253 PSK_EscapeOnBind, 2254 nullptr); 2255 2256 return State; 2257 } 2258 2259 ProgramStateRef 2260 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2261 const InvalidatedSymbols *Invalidated, 2262 ArrayRef<const MemRegion *> ExplicitRegions, 2263 ArrayRef<const MemRegion *> Regions, 2264 const CallEvent *Call, 2265 RegionAndSymbolInvalidationTraits &ITraits) { 2266 2267 if (!Invalidated || Invalidated->empty()) 2268 return State; 2269 2270 if (!Call) 2271 return getCheckerManager().runCheckersForPointerEscape(State, 2272 *Invalidated, 2273 nullptr, 2274 PSK_EscapeOther, 2275 &ITraits); 2276 2277 // If the symbols were invalidated by a call, we want to find out which ones 2278 // were invalidated directly due to being arguments to the call. 2279 InvalidatedSymbols SymbolsDirectlyInvalidated; 2280 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2281 E = ExplicitRegions.end(); I != E; ++I) { 2282 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2283 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2284 } 2285 2286 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2287 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2288 E = Invalidated->end(); I!=E; ++I) { 2289 SymbolRef sym = *I; 2290 if (SymbolsDirectlyInvalidated.count(sym)) 2291 continue; 2292 SymbolsIndirectlyInvalidated.insert(sym); 2293 } 2294 2295 if (!SymbolsDirectlyInvalidated.empty()) 2296 State = getCheckerManager().runCheckersForPointerEscape(State, 2297 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2298 2299 // Notify about the symbols that get indirectly invalidated by the call. 2300 if (!SymbolsIndirectlyInvalidated.empty()) 2301 State = getCheckerManager().runCheckersForPointerEscape(State, 2302 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2303 2304 return State; 2305 } 2306 2307 /// evalBind - Handle the semantics of binding a value to a specific location. 2308 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2309 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2310 ExplodedNode *Pred, 2311 SVal location, SVal Val, 2312 bool atDeclInit, const ProgramPoint *PP) { 2313 2314 const LocationContext *LC = Pred->getLocationContext(); 2315 PostStmt PS(StoreE, LC); 2316 if (!PP) 2317 PP = &PS; 2318 2319 // Do a previsit of the bind. 2320 ExplodedNodeSet CheckedSet; 2321 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2322 StoreE, *this, *PP); 2323 2324 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2325 2326 // If the location is not a 'Loc', it will already be handled by 2327 // the checkers. There is nothing left to do. 
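  // (This can happen, for instance, when the target location is UnknownVal;
  // in that case we only record the potential pointer escape of the bound
  // value and emit a single node.)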
2328 if (!location.getAs<Loc>()) { 2329 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2330 /*tag*/nullptr); 2331 ProgramStateRef state = Pred->getState(); 2332 state = processPointerEscapedOnBind(state, location, Val, LC); 2333 Bldr.generateNode(L, state, Pred); 2334 return; 2335 } 2336 2337 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2338 I!=E; ++I) { 2339 ExplodedNode *PredI = *I; 2340 ProgramStateRef state = PredI->getState(); 2341 2342 state = processPointerEscapedOnBind(state, location, Val, LC); 2343 2344 // When binding the value, pass on the hint that this is a initialization. 2345 // For initializations, we do not need to inform clients of region 2346 // changes. 2347 state = state->bindLoc(location.castAs<Loc>(), 2348 Val, LC, /* notifyChanges = */ !atDeclInit); 2349 2350 const MemRegion *LocReg = nullptr; 2351 if (Optional<loc::MemRegionVal> LocRegVal = 2352 location.getAs<loc::MemRegionVal>()) { 2353 LocReg = LocRegVal->getRegion(); 2354 } 2355 2356 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2357 Bldr.generateNode(L, state, PredI); 2358 } 2359 } 2360 2361 /// evalStore - Handle the semantics of a store via an assignment. 2362 /// @param Dst The node set to store generated state nodes 2363 /// @param AssignE The assignment expression if the store happens in an 2364 /// assignment. 2365 /// @param LocationE The location expression that is stored to. 2366 /// @param state The current simulation state 2367 /// @param location The location to store the value 2368 /// @param Val The value to be stored 2369 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2370 const Expr *LocationE, 2371 ExplodedNode *Pred, 2372 ProgramStateRef state, SVal location, SVal Val, 2373 const ProgramPointTag *tag) { 2374 // Proceed with the store. We use AssignE as the anchor for the PostStore 2375 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2376 const Expr *StoreE = AssignE ? AssignE : LocationE; 2377 2378 // Evaluate the location (checks for bad dereferences). 2379 ExplodedNodeSet Tmp; 2380 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2381 2382 if (Tmp.empty()) 2383 return; 2384 2385 if (location.isUndef()) 2386 return; 2387 2388 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2389 evalBind(Dst, StoreE, *NI, location, Val, false); 2390 } 2391 2392 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2393 const Expr *NodeEx, 2394 const Expr *BoundEx, 2395 ExplodedNode *Pred, 2396 ProgramStateRef state, 2397 SVal location, 2398 const ProgramPointTag *tag, 2399 QualType LoadTy) 2400 { 2401 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2402 2403 // Are we loading from a region? This actually results in two loads; one 2404 // to fetch the address of the referenced value and one to fetch the 2405 // referenced value. 2406 if (const TypedValueRegion *TR = 2407 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2408 2409 QualType ValTy = TR->getValueType(); 2410 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2411 static SimpleProgramPointTag 2412 loadReferenceTag(TagProviderName, "Load Reference"); 2413 ExplodedNodeSet Tmp; 2414 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2415 location, &loadReferenceTag, 2416 getContext().getPointerType(RT->getPointeeType())); 2417 2418 // Perform the load from the referenced value. 
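      // Illustrative example: given "int &r = x;", reading 'r' first loads the
      // location stored in the reference region (the address of 'x'); the loop
      // below then loads the value found at that location.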
2419 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2420 state = (*I)->getState(); 2421 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2422 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2423 } 2424 return; 2425 } 2426 } 2427 2428 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2429 } 2430 2431 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2432 const Expr *NodeEx, 2433 const Expr *BoundEx, 2434 ExplodedNode *Pred, 2435 ProgramStateRef state, 2436 SVal location, 2437 const ProgramPointTag *tag, 2438 QualType LoadTy) { 2439 assert(NodeEx); 2440 assert(BoundEx); 2441 // Evaluate the location (checks for bad dereferences). 2442 ExplodedNodeSet Tmp; 2443 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2444 if (Tmp.empty()) 2445 return; 2446 2447 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2448 if (location.isUndef()) 2449 return; 2450 2451 // Proceed with the load. 2452 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2453 state = (*NI)->getState(); 2454 const LocationContext *LCtx = (*NI)->getLocationContext(); 2455 2456 SVal V = UnknownVal(); 2457 if (location.isValid()) { 2458 if (LoadTy.isNull()) 2459 LoadTy = BoundEx->getType(); 2460 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2461 } 2462 2463 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2464 ProgramPoint::PostLoadKind); 2465 } 2466 } 2467 2468 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2469 const Stmt *NodeEx, 2470 const Stmt *BoundEx, 2471 ExplodedNode *Pred, 2472 ProgramStateRef state, 2473 SVal location, 2474 const ProgramPointTag *tag, 2475 bool isLoad) { 2476 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2477 // Early checks for performance reason. 2478 if (location.isUnknown()) { 2479 return; 2480 } 2481 2482 ExplodedNodeSet Src; 2483 BldrTop.takeNodes(Pred); 2484 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2485 if (Pred->getState() != state) { 2486 // Associate this new state with an ExplodedNode. 2487 // FIXME: If I pass null tag, the graph is incorrect, e.g for 2488 // int *p; 2489 // p = 0; 2490 // *p = 0xDEADBEEF; 2491 // "p = 0" is not noted as "Null pointer value stored to 'p'" but 2492 // instead "int *p" is noted as 2493 // "Variable 'p' initialized to a null pointer value" 2494 2495 static SimpleProgramPointTag tag(TagProviderName, "Location"); 2496 Bldr.generateNode(NodeEx, Pred, state, &tag); 2497 } 2498 ExplodedNodeSet Tmp; 2499 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad, 2500 NodeEx, BoundEx, *this); 2501 BldrTop.addNodes(Tmp); 2502 } 2503 2504 std::pair<const ProgramPointTag *, const ProgramPointTag*> 2505 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() { 2506 static SimpleProgramPointTag 2507 eagerlyAssumeBinOpBifurcationTrue(TagProviderName, 2508 "Eagerly Assume True"), 2509 eagerlyAssumeBinOpBifurcationFalse(TagProviderName, 2510 "Eagerly Assume False"); 2511 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue, 2512 &eagerlyAssumeBinOpBifurcationFalse); 2513 } 2514 2515 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst, 2516 ExplodedNodeSet &Src, 2517 const Expr *Ex) { 2518 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx); 2519 2520 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) { 2521 ExplodedNode *Pred = *I; 2522 // Test if the previous node was as the same expression. 
This can happen 2523 // when the expression fails to evaluate to anything meaningful and 2524 // (as an optimization) we don't generate a node. 2525 ProgramPoint P = Pred->getLocation(); 2526 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2527 continue; 2528 } 2529 2530 ProgramStateRef state = Pred->getState(); 2531 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2532 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2533 if (SEV && SEV->isExpression()) { 2534 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2535 geteagerlyAssumeBinOpBifurcationTags(); 2536 2537 ProgramStateRef StateTrue, StateFalse; 2538 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2539 2540 // First assume that the condition is true. 2541 if (StateTrue) { 2542 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2543 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2544 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2545 } 2546 2547 // Next, assume that the condition is false. 2548 if (StateFalse) { 2549 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2550 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2551 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2552 } 2553 } 2554 } 2555 } 2556 2557 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2558 ExplodedNodeSet &Dst) { 2559 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2560 // We have processed both the inputs and the outputs. All of the outputs 2561 // should evaluate to Locs. Nuke all of their values. 2562 2563 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2564 // which interprets the inline asm and stores proper results in the 2565 // outputs. 2566 2567 ProgramStateRef state = Pred->getState(); 2568 2569 for (const Expr *O : A->outputs()) { 2570 SVal X = state->getSVal(O, Pred->getLocationContext()); 2571 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2572 2573 if (Optional<Loc> LV = X.getAs<Loc>()) 2574 state = state->bindLoc(*LV, UnknownVal(), Pred->getLocationContext()); 2575 } 2576 2577 Bldr.generateNode(A, Pred, state); 2578 } 2579 2580 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2581 ExplodedNodeSet &Dst) { 2582 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2583 Bldr.generateNode(A, Pred, Pred->getState()); 2584 } 2585 2586 //===----------------------------------------------------------------------===// 2587 // Visualization. 2588 //===----------------------------------------------------------------------===// 2589 2590 #ifndef NDEBUG 2591 static ExprEngine* GraphPrintCheckerState; 2592 static SourceManager* GraphPrintSourceManager; 2593 2594 namespace llvm { 2595 template<> 2596 struct DOTGraphTraits<ExplodedNode*> : 2597 public DefaultDOTGraphTraits { 2598 2599 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2600 2601 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2602 // work. 2603 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2604 return ""; 2605 } 2606 2607 // De-duplicate some source location pretty-printing. 
2608 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2609 if (SLoc.isFileID()) { 2610 Out << "\\lline=" 2611 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2612 << " col=" 2613 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2614 << "\\l"; 2615 } 2616 } 2617 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2618 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2619 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2620 else 2621 SLoc.print(Out, *GraphPrintSourceManager); 2622 } 2623 2624 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2625 2626 std::string sbuf; 2627 llvm::raw_string_ostream Out(sbuf); 2628 2629 // Program Location. 2630 ProgramPoint Loc = N->getLocation(); 2631 2632 switch (Loc.getKind()) { 2633 case ProgramPoint::BlockEntranceKind: { 2634 Out << "Block Entrance: B" 2635 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2636 break; 2637 } 2638 2639 case ProgramPoint::BlockExitKind: 2640 assert (false); 2641 break; 2642 2643 case ProgramPoint::CallEnterKind: 2644 Out << "CallEnter"; 2645 break; 2646 2647 case ProgramPoint::CallExitBeginKind: 2648 Out << "CallExitBegin"; 2649 break; 2650 2651 case ProgramPoint::CallExitEndKind: 2652 Out << "CallExitEnd"; 2653 break; 2654 2655 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2656 Out << "PostStmtPurgeDeadSymbols"; 2657 break; 2658 2659 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2660 Out << "PreStmtPurgeDeadSymbols"; 2661 break; 2662 2663 case ProgramPoint::EpsilonKind: 2664 Out << "Epsilon Point"; 2665 break; 2666 2667 case ProgramPoint::PreImplicitCallKind: { 2668 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2669 Out << "PreCall: "; 2670 2671 // FIXME: Get proper printing options. 2672 PC.getDecl()->print(Out, LangOptions()); 2673 printLocation(Out, PC.getLocation()); 2674 break; 2675 } 2676 2677 case ProgramPoint::PostImplicitCallKind: { 2678 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2679 Out << "PostCall: "; 2680 2681 // FIXME: Get proper printing options. 2682 PC.getDecl()->print(Out, LangOptions()); 2683 printLocation(Out, PC.getLocation()); 2684 break; 2685 } 2686 2687 case ProgramPoint::PostInitializerKind: { 2688 Out << "PostInitializer: "; 2689 const CXXCtorInitializer *Init = 2690 Loc.castAs<PostInitializer>().getInitializer(); 2691 if (const FieldDecl *FD = Init->getAnyMember()) 2692 Out << *FD; 2693 else { 2694 QualType Ty = Init->getTypeSourceInfo()->getType(); 2695 Ty = Ty.getLocalUnqualifiedType(); 2696 LangOptions LO; // FIXME. 2697 Ty.print(Out, LO); 2698 } 2699 break; 2700 } 2701 2702 case ProgramPoint::BlockEdgeKind: { 2703 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2704 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2705 << E.getDst()->getBlockID() << ')'; 2706 2707 if (const Stmt *T = E.getSrc()->getTerminator()) { 2708 SourceLocation SLoc = T->getLocStart(); 2709 2710 Out << "\\|Terminator: "; 2711 LangOptions LO; // FIXME. 2712 E.getSrc()->printTerminator(Out, LO); 2713 2714 if (SLoc.isFileID()) { 2715 Out << "\\lline=" 2716 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2717 << " col=" 2718 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2719 } 2720 2721 if (isa<SwitchStmt>(T)) { 2722 const Stmt *Label = E.getDst()->getLabel(); 2723 2724 if (Label) { 2725 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2726 Out << "\\lcase "; 2727 LangOptions LO; // FIXME. 
2728 if (C->getLHS()) 2729 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2730 2731 if (const Stmt *RHS = C->getRHS()) { 2732 Out << " .. "; 2733 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2734 } 2735 2736 Out << ":"; 2737 } 2738 else { 2739 assert (isa<DefaultStmt>(Label)); 2740 Out << "\\ldefault:"; 2741 } 2742 } 2743 else 2744 Out << "\\l(implicit) default:"; 2745 } 2746 else if (isa<IndirectGotoStmt>(T)) { 2747 // FIXME 2748 } 2749 else { 2750 Out << "\\lCondition: "; 2751 if (*E.getSrc()->succ_begin() == E.getDst()) 2752 Out << "true"; 2753 else 2754 Out << "false"; 2755 } 2756 2757 Out << "\\l"; 2758 } 2759 2760 break; 2761 } 2762 2763 default: { 2764 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2765 assert(S != nullptr && "Expecting non-null Stmt"); 2766 2767 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2768 LangOptions LO; // FIXME. 2769 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2770 printLocation(Out, S->getLocStart()); 2771 2772 if (Loc.getAs<PreStmt>()) 2773 Out << "\\lPreStmt\\l;"; 2774 else if (Loc.getAs<PostLoad>()) 2775 Out << "\\lPostLoad\\l;"; 2776 else if (Loc.getAs<PostStore>()) 2777 Out << "\\lPostStore\\l"; 2778 else if (Loc.getAs<PostLValue>()) 2779 Out << "\\lPostLValue\\l"; 2780 2781 break; 2782 } 2783 } 2784 2785 ProgramStateRef state = N->getState(); 2786 Out << "\\|StateID: " << (const void*) state.get() 2787 << " NodeID: " << (const void*) N << "\\|"; 2788 2789 // Analysis stack backtrace. 2790 Out << "Location context stack (from current to outer):\\l"; 2791 const LocationContext *LC = Loc.getLocationContext(); 2792 unsigned Idx = 0; 2793 for (; LC; LC = LC->getParent(), ++Idx) { 2794 Out << Idx << ". (" << (const void *)LC << ") "; 2795 switch (LC->getKind()) { 2796 case LocationContext::StackFrame: 2797 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2798 Out << "Calling " << D->getQualifiedNameAsString(); 2799 else 2800 Out << "Calling anonymous code"; 2801 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2802 Out << " at "; 2803 printLocation2(Out, S->getLocStart()); 2804 } 2805 break; 2806 case LocationContext::Block: 2807 Out << "Invoking block"; 2808 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2809 Out << " defined at "; 2810 printLocation2(Out, D->getLocStart()); 2811 } 2812 break; 2813 case LocationContext::Scope: 2814 Out << "Entering scope"; 2815 // FIXME: Add more info once ScopeContext is activated. 2816 break; 2817 } 2818 Out << "\\l"; 2819 } 2820 Out << "\\l"; 2821 2822 state->printDOT(Out); 2823 2824 Out << "\\l"; 2825 2826 if (const ProgramPointTag *tag = Loc.getTag()) { 2827 Out << "\\|Tag: " << tag->getTagDescription(); 2828 Out << "\\l"; 2829 } 2830 return Out.str(); 2831 } 2832 }; 2833 } // end llvm namespace 2834 #endif 2835 2836 void ExprEngine::ViewGraph(bool trim) { 2837 #ifndef NDEBUG 2838 if (trim) { 2839 std::vector<const ExplodedNode*> Src; 2840 2841 // Flush any outstanding reports to make sure we cover all the nodes. 2842 // This does not cause them to get displayed. 2843 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2844 const_cast<BugType*>(*I)->FlushReports(BR); 2845 2846 // Iterate through the reports and get their nodes. 
2847 for (BugReporter::EQClasses_iterator 2848 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2849 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2850 if (N) Src.push_back(N); 2851 } 2852 2853 ViewGraph(Src); 2854 } 2855 else { 2856 GraphPrintCheckerState = this; 2857 GraphPrintSourceManager = &getContext().getSourceManager(); 2858 2859 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2860 2861 GraphPrintCheckerState = nullptr; 2862 GraphPrintSourceManager = nullptr; 2863 } 2864 #endif 2865 } 2866 2867 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2868 #ifndef NDEBUG 2869 GraphPrintCheckerState = this; 2870 GraphPrintSourceManager = &getContext().getSourceManager(); 2871 2872 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2873 2874 if (!TrimmedG.get()) 2875 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2876 else 2877 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2878 2879 GraphPrintCheckerState = nullptr; 2880 GraphPrintSourceManager = nullptr; 2881 #endif 2882 } 2883