//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopUnrolling.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions.  Some day.
  do {
    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  } while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function.  This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

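/// Create a temporary object region for the given expression and copy the
/// expression's value into it, so that the analyzer has an actual region to
/// work with when an rvalue is used as an object. When \p Result is null, the
/// region is created only "if needed", i.e. when the current value of the
/// expression is a NonLoc.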
ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *InitWithAdjustments,
                                          const Expr *Result) {
  // FIXME: This function is a hack that works around the quirky AST
  // we often get for C++ temporaries. If only we modelled the actual
  // execution order of statements properly in the CFG, all the hassle
  // with adjustments would not be necessary, and perhaps the whole
  // function could be removed.
  SVal InitValWithAdjustments = State->getSVal(InitWithAdjustments, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!InitValWithAdjustments.getAs<NonLoc>())
      return State;
    Result = InitWithAdjustments;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!InitValWithAdjustments.getAs<Loc>() ||
           Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // MaterializeTemporaryExpr may appear out of place, after a few field and
  // base-class accesses have been made to the object, even though semantically
  // it is the whole object that gets materialized and lifetime-extended.
  //
  // For example:
  //
  //   `-MaterializeTemporaryExpr
  //     `-MemberExpr
  //       `-CXXTemporaryObjectExpr
  //
  // instead of the more natural
  //
  //   `-MemberExpr
  //     `-MaterializeTemporaryExpr
  //       `-CXXTemporaryObjectExpr
  //
  // Use the usual methods for obtaining the expression of the base object,
  // and record the adjustments that we need to make to obtain the sub-object
  // that the whole expression 'Ex' refers to. This trick is usual,
  // in the sense that CodeGen takes a similar route.

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;

  const Expr *Init = InitWithAdjustments->skipRValueSubobjectAdjustments(
      CommaLHSs, Adjustments);

  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Init, LC);

  SVal Reg = loc::MemRegionVal(TR);
  SVal BaseReg = Reg;

  // Make the necessary adjustments to obtain the sub-object.
  for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
    const SubobjectAdjustment &Adj = *I;
    switch (Adj.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
      break;
    case SubobjectAdjustment::FieldAdjustment:
      Reg = StoreMgr.getLValueField(Adj.Field, Reg);
      break;
    case SubobjectAdjustment::MemberPointerAdjustment:
      // FIXME: Unimplemented.
      State = State->bindDefault(Reg, UnknownVal(), LC);
      return State;
    }
  }

  // What remains is to copy the value of the object to the new region.
  // FIXME: In other words, what we should always do is copy the value of the
  // Init expression (which corresponds to the bigger object) to the whole
  // temporary region TR. However, this value is often no longer present
  // in the Environment. If it has disappeared, we instead invalidate TR.
  // Still, what we can do is assign the value of expression Ex (which
  // corresponds to the sub-object) to the TR's sub-region Reg. At least,
  // values inside Reg would be correct.
  SVal InitVal = State->getSVal(Init, LC);
  if (InitVal.isUnknown()) {
    InitVal = getSValBuilder().conjureSymbolVal(Result, LC, Init->getType(),
                                                currBldrCtx->blockCount());
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);

    // Then we'd need to take the value that certainly exists and bind it over.
    if (InitValWithAdjustments.isUnknown()) {
      // Try to recover some path sensitivity in case we couldn't
      // compute the value.
      InitValWithAdjustments = getSValBuilder().conjureSymbolVal(
          Result, LC, InitWithAdjustments->getType(),
          currBldrCtx->blockCount());
    }
    State =
        State->bindLoc(Reg.castAs<Loc>(), InitValWithAdjustments, LC, false);
  } else {
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);
  }

  // The result expression would now point to the correct sub-region of the
  // newly created temporary region. Do this last in order to getSVal() of Init
  // correctly in case (Result == Init).
  State = State->BindExpr(Result, LC, Reg);

  // Notify checkers once for two bindLoc()s.
  State = processRegionChange(State, TR, LC);

  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// processAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const LocationContext *LCtx,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         LCtx, Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
    case CFGElement::LoopExit:
      ProcessLoopExit(E.castAs<CFGLoopExit>().getLoopStmt(), Pred);
      return;
    case CFGElement::LifetimeEnds:
      return;
  }
}

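/// Decide whether dead symbols and bindings should be purged before
/// processing statement \p S. Purging is disabled entirely under PurgeNone,
/// and postponed for expressions whose value is still consumed by a parent
/// expression.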
static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression?  If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

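/// Evaluate a single statement from the CFG: purge dead bindings if
/// appropriate, run the transfer functions via Visit(), and enqueue the
/// resulting nodes on the work list.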
void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessLoopExit(const Stmt* S, ExplodedNode *Pred) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating end of the loop");
  ExplodedNodeSet Dst;
  Dst.Add(Pred);
  NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  ProgramStateRef NewState = Pred->getState();

  if (AMgr.options.shouldUnrollLoops())
    NewState = processLoopEnd(S, NewState);

  LoopExit PP(S, Pred->getLocationContext());
  Bldr.generateNode(PP, NewState, Pred);
  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

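/// Evaluate a constructor initializer. Member initializers bind the
/// initializer's value into the corresponding field region; base and
/// delegating initializers have already been handled while visiting the
/// nested CXXConstructExpr.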
void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary.
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (Init->getType()->isArrayType()) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (!Field->getType()->isReferenceType())
          if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
            InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

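/// Dispatch an implicit destructor CFG element (automatic object, delete,
/// base, member or temporary destructor) to the matching handler and enqueue
/// the resulting nodes on the work list.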
void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (varType->isReferenceType()) {
    const MemRegion *ValueRegion = state->getSVal(Region).getAsRegion();
    if (!ValueRegion) {
      // FIXME: This should not happen. The language guarantees the presence
      // of a valid initializer here, so the reference cannot be undefined.
      // It seems that we're calling destructors over variables that
      // were not initialized yet.
      return;
    }
    Region = ValueRegion->getBaseRegion();
    varType = cast<TypedValueRegion>(Region)->getValueType();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run the destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

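/// Run the destructor of a direct or virtual base class: the 'this' value of
/// the currently analyzed destructor is adjusted to the base-class subobject
/// before the base destructor is visited.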
void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

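/// Mark the temporary bound by \p BTE as initialized on the current path, so
/// that processCleanupTemporaryBranch can later decide which branch of the
/// corresponding temporary-destructor decision is feasible.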
void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++, OpenMP and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::DependentCoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeSimdDirectiveClass:
    case Stmt::CapturedStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ArrayInitLoopExprClass:
    case Stmt::ArrayInitIndexExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
      LLVM_FALLTHROUGH;
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExprs as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
          = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, MTE, *this);
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        CreateCXXTemporaryObject(MTE, *i, dstExpr);
      }
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, MTE, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete.  We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

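/// Walk backwards from \p N to the node that was visited right before the
/// call site was processed, and re-enqueue that point with the
/// ReplayWithoutInlining flag set in the state, so that the call is
/// re-evaluated conservatively instead of being inlined again.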
bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF =
      CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance.  (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  // If we reach a loop which has a known bound (and meets
  // other constraints) then consider completely unrolling it.
  if (AMgr.options.shouldUnrollLoops()) {
    unsigned maxBlockVisitOnPath = AMgr.options.maxBlockVisitOnPath;
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (Term) {
      ProgramStateRef NewState = updateLoopStack(Term, AMgr.getASTContext(),
                                                 Pred, maxBlockVisitOnPath);
      if (NewState != Pred->getState()) {
        ExplodedNode *UpdatedNode = nodeBuilder.generateNode(NewState, Pred);
        if (!UpdatedNode)
          return;
        Pred = UpdatedNode;
      }
    }
    // If we are inside an unrolled loop then there is no need to check the
    // counters.
    if (isUnrolledState(Pred->getState()))
      return;
  }

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
1568 const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
1569 const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
1570 const LocationContext *RootLC =
1571 (*G.roots_begin())->getLocation().getLocationContext();
1572 if (RootLC->getCurrentStackFrame() != CalleeSF) {
1573 Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());
1574
1575 // Re-run the call evaluation without inlining it, by storing the
1576 // no-inlining policy in the state and enqueuing the new work item on
1577 // the list. Replay should almost never fail. Use the stats to catch it
1578 // if it does.
1579 if ((!AMgr.options.NoRetryExhausted &&
1580 replayWithoutInlining(Pred, CalleeLC)))
1581 return;
1582 NumMaxBlockCountReachedInInlined++;
1583 } else
1584 NumMaxBlockCountReached++;
1585
1586 // Mark sink nodes as exhausted (for stats) only if the retry failed.
1587 Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
1588 }
1589 }
1590
1591 //===----------------------------------------------------------------------===//
1592 // Branch processing.
1593 //===----------------------------------------------------------------------===//
1594
1595 /// RecoverCastedSymbol - A helper function for processBranch that is used
1596 /// to try to recover some path-sensitivity for casts of symbolic
1597 /// integers that promote their values (which are currently not tracked well).
1598 /// This function returns the SVal bound to Condition->IgnoreCasts() if all the
1599 /// cast(s) did was sign-extend the original value.
1600 static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
1601 ProgramStateRef state,
1602 const Stmt *Condition,
1603 const LocationContext *LCtx,
1604 ASTContext &Ctx) {
1605
1606 const Expr *Ex = dyn_cast<Expr>(Condition);
1607 if (!Ex)
1608 return UnknownVal();
1609
1610 uint64_t bits = 0;
1611 bool bitsInit = false;
1612
1613 while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
1614 QualType T = CE->getType();
1615
1616 if (!T->isIntegralOrEnumerationType())
1617 return UnknownVal();
1618
1619 uint64_t newBits = Ctx.getTypeSize(T);
1620 if (!bitsInit || newBits < bits) {
1621 bitsInit = true;
1622 bits = newBits;
1623 }
1624
1625 Ex = CE->getSubExpr();
1626 }
1627
1628 // We reached a non-cast. Is it a symbolic value?
1629 QualType T = Ex->getType();
1630
1631 if (!bitsInit || !T->isIntegralOrEnumerationType() ||
1632 Ctx.getTypeSize(T) > bits)
1633 return UnknownVal();
1634
1635 return state->getSVal(Ex, LCtx);
1636 }
1637
1638 #ifndef NDEBUG
1639 static const Stmt *getRightmostLeaf(const Stmt *Condition) {
1640 while (Condition) {
1641 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
1642 if (!BO || !BO->isLogicalOp()) {
1643 return Condition;
1644 }
1645 Condition = BO->getRHS()->IgnoreParens();
1646 }
1647 return nullptr;
1648 }
1649 #endif
1650
1651 // Returns the condition the branch at the end of 'B' depends on and whose value
1652 // has been evaluated within 'B'.
1653 // In most cases, the terminator condition of 'B' will be evaluated fully in
1654 // the last statement of 'B'; in those cases, the resolved condition is the
1655 // given 'Condition'.
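// (E.g., for "if (x > 0)" the comparison is itself the last statement of the
// block, so 'Condition' is returned unchanged.)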
1656 // If the condition of the branch is a logical binary operator tree, the CFG is 1657 // optimized: in that case, we know that the expression formed by all but the 1658 // rightmost leaf of the logical binary operator tree must be true, and thus 1659 // the branch condition is at this point equivalent to the truth value of that 1660 // rightmost leaf; the CFG block thus only evaluates this rightmost leaf 1661 // expression in its final statement. As the full condition in that case was 1662 // not evaluated, and is thus not in the SVal cache, we need to use that leaf 1663 // expression to evaluate the truth value of the condition in the current state 1664 // space. 1665 static const Stmt *ResolveCondition(const Stmt *Condition, 1666 const CFGBlock *B) { 1667 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1668 Condition = Ex->IgnoreParens(); 1669 1670 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition); 1671 if (!BO || !BO->isLogicalOp()) 1672 return Condition; 1673 1674 assert(!B->getTerminator().isTemporaryDtorsBranch() && 1675 "Temporary destructor branches handled by processBindTemporary."); 1676 1677 // For logical operations, we still have the case where some branches 1678 // use the traditional "merge" approach and others sink the branch 1679 // directly into the basic blocks representing the logical operation. 1680 // We need to distinguish between those two cases here. 1681 1682 // The invariants are still shifting, but it is possible that the 1683 // last element in a CFGBlock is not a CFGStmt. Look for the last 1684 // CFGStmt as the value of the condition. 1685 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1686 for (; I != E; ++I) { 1687 CFGElement Elem = *I; 1688 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1689 if (!CS) 1690 continue; 1691 const Stmt *LastStmt = CS->getStmt(); 1692 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1693 return LastStmt; 1694 } 1695 llvm_unreachable("could not resolve condition"); 1696 } 1697 1698 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1699 NodeBuilderContext& BldCtx, 1700 ExplodedNode *Pred, 1701 ExplodedNodeSet &Dst, 1702 const CFGBlock *DstT, 1703 const CFGBlock *DstF) { 1704 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1705 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1706 const LocationContext *LCtx = Pred->getLocationContext(); 1707 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1708 currBldrCtx = &BldCtx; 1709 1710 // Check for NULL conditions; e.g. "for(;;)" 1711 if (!Condition) { 1712 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1713 NullCondBldr.markInfeasible(false); 1714 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1715 return; 1716 } 1717 1718 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1719 Condition = Ex->IgnoreParens(); 1720 1721 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1722 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1723 Condition->getLocStart(), 1724 "Error evaluating branch"); 1725 1726 ExplodedNodeSet CheckersOutSet; 1727 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1728 Pred, *this); 1729 // We generated only sinks. 
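// (This can happen when a checker reports a fatal error while inspecting the
// condition; with nothing but sinks there is nothing left to branch on.)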
1730 if (CheckersOutSet.empty()) 1731 return; 1732 1733 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1734 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1735 E = CheckersOutSet.end(); E != I; ++I) { 1736 ExplodedNode *PredI = *I; 1737 1738 if (PredI->isSink()) 1739 continue; 1740 1741 ProgramStateRef PrevState = PredI->getState(); 1742 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1743 1744 if (X.isUnknownOrUndef()) { 1745 // Give it a chance to recover from unknown. 1746 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1747 if (Ex->getType()->isIntegralOrEnumerationType()) { 1748 // Try to recover some path-sensitivity. Right now casts of symbolic 1749 // integers that promote their values are currently not tracked well. 1750 // If 'Condition' is such an expression, try and recover the 1751 // underlying value and use that instead. 1752 SVal recovered = RecoverCastedSymbol(getStateManager(), 1753 PrevState, Condition, 1754 PredI->getLocationContext(), 1755 getContext()); 1756 1757 if (!recovered.isUnknown()) { 1758 X = recovered; 1759 } 1760 } 1761 } 1762 } 1763 1764 // If the condition is still unknown, give up. 1765 if (X.isUnknownOrUndef()) { 1766 builder.generateNode(PrevState, true, PredI); 1767 builder.generateNode(PrevState, false, PredI); 1768 continue; 1769 } 1770 1771 DefinedSVal V = X.castAs<DefinedSVal>(); 1772 1773 ProgramStateRef StTrue, StFalse; 1774 std::tie(StTrue, StFalse) = PrevState->assume(V); 1775 1776 // Process the true branch. 1777 if (builder.isFeasible(true)) { 1778 if (StTrue) 1779 builder.generateNode(StTrue, true, PredI); 1780 else 1781 builder.markInfeasible(true); 1782 } 1783 1784 // Process the false branch. 1785 if (builder.isFeasible(false)) { 1786 if (StFalse) 1787 builder.generateNode(StFalse, false, PredI); 1788 else 1789 builder.markInfeasible(false); 1790 } 1791 } 1792 currBldrCtx = nullptr; 1793 } 1794 1795 /// The GDM component containing the set of global variables which have been 1796 /// previously initialized with explicit initializers. 1797 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1798 llvm::ImmutableSet<const VarDecl *>) 1799 1800 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1801 NodeBuilderContext &BuilderCtx, 1802 ExplodedNode *Pred, 1803 clang::ento::ExplodedNodeSet &Dst, 1804 const CFGBlock *DstT, 1805 const CFGBlock *DstF) { 1806 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1807 currBldrCtx = &BuilderCtx; 1808 1809 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1810 ProgramStateRef state = Pred->getState(); 1811 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1812 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1813 1814 if (!initHasRun) { 1815 state = state->add<InitializedGlobalsSet>(VD); 1816 } 1817 1818 builder.generateNode(state, initHasRun, Pred); 1819 builder.markInfeasible(!initHasRun); 1820 1821 currBldrCtx = nullptr; 1822 } 1823 1824 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1825 /// nodes by processing the 'effects' of a computed goto jump. 1826 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1827 1828 ProgramStateRef state = builder.getState(); 1829 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1830 1831 // Three possibilities: 1832 // 1833 // (1) We know the computed label. 1834 // (2) The label is NULL (or some other constant), or Undefined. 1835 // (3) We have no clue about the label. 
Dispatch to all targets. 1836 // 1837 1838 typedef IndirectGotoNodeBuilder::iterator iterator; 1839 1840 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1841 const LabelDecl *L = LV->getLabel(); 1842 1843 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1844 if (I.getLabel() == L) { 1845 builder.generateNode(I, state); 1846 return; 1847 } 1848 } 1849 1850 llvm_unreachable("No block with label."); 1851 } 1852 1853 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1854 // Dispatch to the first target and mark it as a sink. 1855 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1856 // FIXME: add checker visit. 1857 // UndefBranches.insert(N); 1858 return; 1859 } 1860 1861 // This is really a catch-all. We don't support symbolics yet. 1862 // FIXME: Implement dispatch for symbolic pointers. 1863 1864 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1865 builder.generateNode(I, state); 1866 } 1867 1868 #if 0 1869 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1870 const StackFrameContext* Frame = Pred.getStackFrame(); 1871 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1872 Pred.getState()->get<InitializedTemporariesSet>(); 1873 return std::find_if(Set.begin(), Set.end(), 1874 [&](const CXXBindTemporaryContext &Ctx) { 1875 if (Ctx.second == Frame) { 1876 Ctx.first->dump(); 1877 llvm::errs() << "\n"; 1878 } 1879 return Ctx.second == Frame; 1880 }) == Set.end(); 1881 } 1882 #endif 1883 1884 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1885 ExplodedNode *Pred, 1886 ExplodedNodeSet &Dst, 1887 const BlockEdge &L) { 1888 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1889 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1890 } 1891 1892 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1893 /// nodes when the control reaches the end of a function. 1894 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1895 ExplodedNode *Pred, 1896 const ReturnStmt *RS) { 1897 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1898 // We currently cannot enable this assert, as lifetime extended temporaries 1899 // are not modelled correctly. 1900 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1901 StateMgr.EndPath(Pred->getState()); 1902 1903 ExplodedNodeSet Dst; 1904 if (Pred->getLocationContext()->inTopFrame()) { 1905 // Remove dead symbols. 1906 ExplodedNodeSet AfterRemovedDead; 1907 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1908 1909 // Notify checkers. 1910 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1911 E = AfterRemovedDead.end(); I != E; ++I) { 1912 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1913 } 1914 } else { 1915 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1916 } 1917 1918 Engine.enqueueEndOfFunction(Dst, RS); 1919 } 1920 1921 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1922 /// nodes by processing the 'effects' of a switch statement. 
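/// For example (illustrative): given "switch (x) { case 1 ... 3: ...; default: ...; }"
/// with a symbolic 'x', each case node gets the state in which 'x' is assumed
/// to lie in that case's value range (here [1, 3]), while the default branch
/// keeps the state in which every case range has been assumed infeasible.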
1923 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1924 typedef SwitchNodeBuilder::iterator iterator; 1925 ProgramStateRef state = builder.getState(); 1926 const Expr *CondE = builder.getCondition(); 1927 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1928 1929 if (CondV_untested.isUndef()) { 1930 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1931 // FIXME: add checker 1932 //UndefBranches.insert(N); 1933 1934 return; 1935 } 1936 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1937 1938 ProgramStateRef DefaultSt = state; 1939 1940 iterator I = builder.begin(), EI = builder.end(); 1941 bool defaultIsFeasible = I == EI; 1942 1943 for ( ; I != EI; ++I) { 1944 // Successor may be pruned out during CFG construction. 1945 if (!I.getBlock()) 1946 continue; 1947 1948 const CaseStmt *Case = I.getCase(); 1949 1950 // Evaluate the LHS of the case value. 1951 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1952 assert(V1.getBitWidth() == getContext().getIntWidth(CondE->getType())); 1953 1954 // Get the RHS of the case, if it exists. 1955 llvm::APSInt V2; 1956 if (const Expr *E = Case->getRHS()) 1957 V2 = E->EvaluateKnownConstInt(getContext()); 1958 else 1959 V2 = V1; 1960 1961 ProgramStateRef StateCase; 1962 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1963 std::tie(StateCase, DefaultSt) = 1964 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1965 else // UnknownVal 1966 StateCase = DefaultSt; 1967 1968 if (StateCase) 1969 builder.generateCaseStmtNode(I, StateCase); 1970 1971 // Now "assume" that the case doesn't match. Add this state 1972 // to the default state (if it is feasible). 1973 if (DefaultSt) 1974 defaultIsFeasible = true; 1975 else { 1976 defaultIsFeasible = false; 1977 break; 1978 } 1979 } 1980 1981 if (!defaultIsFeasible) 1982 return; 1983 1984 // If we have switch(enum value), the default branch is not 1985 // feasible if all of the enum constants not covered by 'case:' statements 1986 // are not feasible values for the switch condition. 1987 // 1988 // Note that this isn't as accurate as it could be. Even if there isn't 1989 // a case for a particular enum value as long as that enum value isn't 1990 // feasible then it shouldn't be considered for making 'default:' reachable. 1991 const SwitchStmt *SS = builder.getSwitch(); 1992 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1993 if (CondExpr->getType()->getAs<EnumType>()) { 1994 if (SS->isAllEnumCasesCovered()) 1995 return; 1996 } 1997 1998 builder.generateDefaultCaseNode(DefaultSt); 1999 } 2000 2001 //===----------------------------------------------------------------------===// 2002 // Transfer functions: Loads and stores. 2003 //===----------------------------------------------------------------------===// 2004 2005 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 2006 ExplodedNode *Pred, 2007 ExplodedNodeSet &Dst) { 2008 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2009 2010 ProgramStateRef state = Pred->getState(); 2011 const LocationContext *LCtx = Pred->getLocationContext(); 2012 2013 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 2014 // C permits "extern void v", and if you cast the address to a valid type, 2015 // you can even do things with it. We simply pretend 2016 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 2017 const LocationContext *LocCtxt = Pred->getLocationContext(); 2018 const Decl *D = LocCtxt->getDecl(); 2019 const auto *MD = D ? 
dyn_cast<CXXMethodDecl>(D) : nullptr; 2020 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 2021 SVal V; 2022 bool IsReference; 2023 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 2024 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 2025 MD->getParent()->isLambda()) { 2026 // Lookup the field of the lambda. 2027 const CXXRecordDecl *CXXRec = MD->getParent(); 2028 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 2029 FieldDecl *LambdaThisCaptureField; 2030 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 2031 const FieldDecl *FD = LambdaCaptureFields[VD]; 2032 if (!FD) { 2033 // When a constant is captured, sometimes no corresponding field is 2034 // created in the lambda object. 2035 assert(VD->getType().isConstQualified()); 2036 V = state->getLValue(VD, LocCtxt); 2037 IsReference = false; 2038 } else { 2039 Loc CXXThis = 2040 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 2041 SVal CXXThisVal = state->getSVal(CXXThis); 2042 V = state->getLValue(FD, CXXThisVal); 2043 IsReference = FD->getType()->isReferenceType(); 2044 } 2045 } else { 2046 V = state->getLValue(VD, LocCtxt); 2047 IsReference = VD->getType()->isReferenceType(); 2048 } 2049 2050 // For references, the 'lvalue' is the pointer address stored in the 2051 // reference region. 2052 if (IsReference) { 2053 if (const MemRegion *R = V.getAsRegion()) 2054 V = state->getSVal(R); 2055 else 2056 V = UnknownVal(); 2057 } 2058 2059 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2060 ProgramPoint::PostLValueKind); 2061 return; 2062 } 2063 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 2064 assert(!Ex->isGLValue()); 2065 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 2066 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 2067 return; 2068 } 2069 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 2070 SVal V = svalBuilder.getFunctionPointer(FD); 2071 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2072 ProgramPoint::PostLValueKind); 2073 return; 2074 } 2075 if (isa<FieldDecl>(D)) { 2076 // FIXME: Compute lvalue of field pointers-to-member. 2077 // Right now we just use a non-null void pointer, so that it gives proper 2078 // results in boolean contexts. 
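// For example (illustrative): given "int S::*pm = &S::x;", 'pm' is modelled
// only as some non-null pointer below, so "if (pm)" takes the true branch,
// but the member it designates is not tracked.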
2079 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 2080 currBldrCtx->blockCount()); 2081 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 2082 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2083 ProgramPoint::PostLValueKind); 2084 return; 2085 } 2086 2087 llvm_unreachable("Support for this Decl not implemented."); 2088 } 2089 2090 /// VisitArraySubscriptExpr - Transfer function for array accesses 2091 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 2092 ExplodedNode *Pred, 2093 ExplodedNodeSet &Dst){ 2094 2095 const Expr *Base = A->getBase()->IgnoreParens(); 2096 const Expr *Idx = A->getIdx()->IgnoreParens(); 2097 2098 ExplodedNodeSet CheckerPreStmt; 2099 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 2100 2101 ExplodedNodeSet EvalSet; 2102 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 2103 assert(A->isGLValue() || 2104 (!AMgr.getLangOpts().CPlusPlus && 2105 A->getType().isCForbiddenLValueType())); 2106 2107 for (auto *Node : CheckerPreStmt) { 2108 const LocationContext *LCtx = Node->getLocationContext(); 2109 ProgramStateRef state = Node->getState(); 2110 SVal V = state->getLValue(A->getType(), 2111 state->getSVal(Idx, LCtx), 2112 state->getSVal(Base, LCtx)); 2113 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 2114 ProgramPoint::PostLValueKind); 2115 } 2116 2117 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 2118 } 2119 2120 /// VisitMemberExpr - Transfer function for member expressions. 2121 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2122 ExplodedNodeSet &Dst) { 2123 2124 // FIXME: Prechecks eventually go in ::Visit(). 2125 ExplodedNodeSet CheckedSet; 2126 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this); 2127 2128 ExplodedNodeSet EvalSet; 2129 ValueDecl *Member = M->getMemberDecl(); 2130 2131 // Handle static member variables and enum constants accessed via 2132 // member syntax. 2133 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) { 2134 ExplodedNodeSet Dst; 2135 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2136 I != E; ++I) { 2137 VisitCommonDeclRefExpr(M, Member, Pred, EvalSet); 2138 } 2139 } else { 2140 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); 2141 ExplodedNodeSet Tmp; 2142 2143 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2144 I != E; ++I) { 2145 ProgramStateRef state = (*I)->getState(); 2146 const LocationContext *LCtx = (*I)->getLocationContext(); 2147 Expr *BaseExpr = M->getBase(); 2148 2149 // Handle C++ method calls. 2150 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) { 2151 if (MD->isInstance()) 2152 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2153 2154 SVal MDVal = svalBuilder.getFunctionPointer(MD); 2155 state = state->BindExpr(M, LCtx, MDVal); 2156 2157 Bldr.generateNode(M, *I, state); 2158 continue; 2159 } 2160 2161 // Handle regular struct fields / member variables. 2162 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2163 SVal baseExprVal = state->getSVal(BaseExpr, LCtx); 2164 2165 FieldDecl *field = cast<FieldDecl>(Member); 2166 SVal L = state->getLValue(field, baseExprVal); 2167 2168 if (M->isGLValue() || M->getType()->isArrayType()) { 2169 // We special-case rvalues of array type because the analyzer cannot 2170 // reason about them, since we expect all regions to be wrapped in Locs. 
2171 // We instead treat these as lvalues and assume that they will decay to 2172 // pointers as soon as they are used. 2173 if (!M->isGLValue()) { 2174 assert(M->getType()->isArrayType()); 2175 const ImplicitCastExpr *PE = 2176 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M)); 2177 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) { 2178 llvm_unreachable("should always be wrapped in ArrayToPointerDecay"); 2179 } 2180 } 2181 2182 if (field->getType()->isReferenceType()) { 2183 if (const MemRegion *R = L.getAsRegion()) 2184 L = state->getSVal(R); 2185 else 2186 L = UnknownVal(); 2187 } 2188 2189 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr, 2190 ProgramPoint::PostLValueKind); 2191 } else { 2192 Bldr.takeNodes(*I); 2193 evalLoad(Tmp, M, M, *I, state, L); 2194 Bldr.addNodes(Tmp); 2195 } 2196 } 2197 } 2198 2199 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this); 2200 } 2201 2202 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred, 2203 ExplodedNodeSet &Dst) { 2204 ExplodedNodeSet AfterPreSet; 2205 getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this); 2206 2207 // For now, treat all the arguments to C11 atomics as escaping. 2208 // FIXME: Ideally we should model the behavior of the atomics precisely here. 2209 2210 ExplodedNodeSet AfterInvalidateSet; 2211 StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx); 2212 2213 for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end(); 2214 I != E; ++I) { 2215 ProgramStateRef State = (*I)->getState(); 2216 const LocationContext *LCtx = (*I)->getLocationContext(); 2217 2218 SmallVector<SVal, 8> ValuesToInvalidate; 2219 for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) { 2220 const Expr *SubExpr = AE->getSubExprs()[SI]; 2221 SVal SubExprVal = State->getSVal(SubExpr, LCtx); 2222 ValuesToInvalidate.push_back(SubExprVal); 2223 } 2224 2225 State = State->invalidateRegions(ValuesToInvalidate, AE, 2226 currBldrCtx->blockCount(), 2227 LCtx, 2228 /*CausedByPointerEscape*/true, 2229 /*Symbols=*/nullptr); 2230 2231 SVal ResultVal = UnknownVal(); 2232 State = State->BindExpr(AE, LCtx, ResultVal); 2233 Bldr.generateNode(AE, *I, State, nullptr, 2234 ProgramPoint::PostStmtKind); 2235 } 2236 2237 getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this); 2238 } 2239 2240 namespace { 2241 class CollectReachableSymbolsCallback final : public SymbolVisitor { 2242 InvalidatedSymbols Symbols; 2243 2244 public: 2245 CollectReachableSymbolsCallback(ProgramStateRef State) {} 2246 const InvalidatedSymbols &getSymbols() const { return Symbols; } 2247 2248 bool VisitSymbol(SymbolRef Sym) override { 2249 Symbols.insert(Sym); 2250 return true; 2251 } 2252 }; 2253 } // end anonymous namespace 2254 2255 // A value escapes in three possible cases: 2256 // (1) We are binding to something that is not a memory region. 2257 // (2) We are binding to a MemrRegion that does not have stack storage. 2258 // (3) We are binding to a MemRegion with stack storage that the store 2259 // does not understand. 2260 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State, 2261 SVal Loc, 2262 SVal Val, 2263 const LocationContext *LCtx) { 2264 // Are we storing to something that causes the value to "escape"? 2265 bool escapes = true; 2266 2267 // TODO: Move to StoreManager. 
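// Illustrative example of case (2) above, with a hypothetical helper:
//
//   int *g;                   // global storage
//   int *make();
//   void f() { g = make(); }  // the symbolic return value is bound to a
//                             // region without stack storage, so it is
//                             // reported to the checkers as escaped.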
2268 if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) { 2269 escapes = !regionLoc->getRegion()->hasStackStorage(); 2270 2271 if (!escapes) { 2272 // To test (3), generate a new state with the binding added. If it is 2273 // the same state, then it escapes (since the store cannot represent 2274 // the binding). 2275 // Do this only if we know that the store is not supposed to generate the 2276 // same state. 2277 SVal StoredVal = State->getSVal(regionLoc->getRegion()); 2278 if (StoredVal != Val) 2279 escapes = (State == (State->bindLoc(*regionLoc, Val, LCtx))); 2280 } 2281 } 2282 2283 // If our store can represent the binding and we aren't storing to something 2284 // that doesn't have local storage then just return and have the simulation 2285 // state continue as is. 2286 if (!escapes) 2287 return State; 2288 2289 // Otherwise, find all symbols referenced by 'val' that we are tracking 2290 // and stop tracking them. 2291 CollectReachableSymbolsCallback Scanner = 2292 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2293 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2294 State = getCheckerManager().runCheckersForPointerEscape(State, 2295 EscapedSymbols, 2296 /*CallEvent*/ nullptr, 2297 PSK_EscapeOnBind, 2298 nullptr); 2299 2300 return State; 2301 } 2302 2303 ProgramStateRef 2304 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2305 const InvalidatedSymbols *Invalidated, 2306 ArrayRef<const MemRegion *> ExplicitRegions, 2307 ArrayRef<const MemRegion *> Regions, 2308 const CallEvent *Call, 2309 RegionAndSymbolInvalidationTraits &ITraits) { 2310 2311 if (!Invalidated || Invalidated->empty()) 2312 return State; 2313 2314 if (!Call) 2315 return getCheckerManager().runCheckersForPointerEscape(State, 2316 *Invalidated, 2317 nullptr, 2318 PSK_EscapeOther, 2319 &ITraits); 2320 2321 // If the symbols were invalidated by a call, we want to find out which ones 2322 // were invalidated directly due to being arguments to the call. 2323 InvalidatedSymbols SymbolsDirectlyInvalidated; 2324 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2325 E = ExplicitRegions.end(); I != E; ++I) { 2326 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2327 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2328 } 2329 2330 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2331 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2332 E = Invalidated->end(); I!=E; ++I) { 2333 SymbolRef sym = *I; 2334 if (SymbolsDirectlyInvalidated.count(sym)) 2335 continue; 2336 SymbolsIndirectlyInvalidated.insert(sym); 2337 } 2338 2339 if (!SymbolsDirectlyInvalidated.empty()) 2340 State = getCheckerManager().runCheckersForPointerEscape(State, 2341 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2342 2343 // Notify about the symbols that get indirectly invalidated by the call. 2344 if (!SymbolsIndirectlyInvalidated.empty()) 2345 State = getCheckerManager().runCheckersForPointerEscape(State, 2346 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2347 2348 return State; 2349 } 2350 2351 /// evalBind - Handle the semantics of binding a value to a specific location. 2352 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 
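/// For a simple assignment such as "x = y" (illustrative), evalStore ends up
/// passing the lvalue of 'x' as 'location' and the rvalue of 'y' as 'Val';
/// checkers are given a chance to previsit the bind before the store happens.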
2353 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2354 ExplodedNode *Pred, 2355 SVal location, SVal Val, 2356 bool atDeclInit, const ProgramPoint *PP) { 2357 2358 const LocationContext *LC = Pred->getLocationContext(); 2359 PostStmt PS(StoreE, LC); 2360 if (!PP) 2361 PP = &PS; 2362 2363 // Do a previsit of the bind. 2364 ExplodedNodeSet CheckedSet; 2365 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2366 StoreE, *this, *PP); 2367 2368 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2369 2370 // If the location is not a 'Loc', it will already be handled by 2371 // the checkers. There is nothing left to do. 2372 if (!location.getAs<Loc>()) { 2373 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2374 /*tag*/nullptr); 2375 ProgramStateRef state = Pred->getState(); 2376 state = processPointerEscapedOnBind(state, location, Val, LC); 2377 Bldr.generateNode(L, state, Pred); 2378 return; 2379 } 2380 2381 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2382 I!=E; ++I) { 2383 ExplodedNode *PredI = *I; 2384 ProgramStateRef state = PredI->getState(); 2385 2386 state = processPointerEscapedOnBind(state, location, Val, LC); 2387 2388 // When binding the value, pass on the hint that this is a initialization. 2389 // For initializations, we do not need to inform clients of region 2390 // changes. 2391 state = state->bindLoc(location.castAs<Loc>(), 2392 Val, LC, /* notifyChanges = */ !atDeclInit); 2393 2394 const MemRegion *LocReg = nullptr; 2395 if (Optional<loc::MemRegionVal> LocRegVal = 2396 location.getAs<loc::MemRegionVal>()) { 2397 LocReg = LocRegVal->getRegion(); 2398 } 2399 2400 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2401 Bldr.generateNode(L, state, PredI); 2402 } 2403 } 2404 2405 /// evalStore - Handle the semantics of a store via an assignment. 2406 /// @param Dst The node set to store generated state nodes 2407 /// @param AssignE The assignment expression if the store happens in an 2408 /// assignment. 2409 /// @param LocationE The location expression that is stored to. 2410 /// @param state The current simulation state 2411 /// @param location The location to store the value 2412 /// @param Val The value to be stored 2413 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2414 const Expr *LocationE, 2415 ExplodedNode *Pred, 2416 ProgramStateRef state, SVal location, SVal Val, 2417 const ProgramPointTag *tag) { 2418 // Proceed with the store. We use AssignE as the anchor for the PostStore 2419 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2420 const Expr *StoreE = AssignE ? AssignE : LocationE; 2421 2422 // Evaluate the location (checks for bad dereferences). 2423 ExplodedNodeSet Tmp; 2424 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2425 2426 if (Tmp.empty()) 2427 return; 2428 2429 if (location.isUndef()) 2430 return; 2431 2432 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2433 evalBind(Dst, StoreE, *NI, location, Val, false); 2434 } 2435 2436 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2437 const Expr *NodeEx, 2438 const Expr *BoundEx, 2439 ExplodedNode *Pred, 2440 ProgramStateRef state, 2441 SVal location, 2442 const ProgramPointTag *tag, 2443 QualType LoadTy) 2444 { 2445 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2446 2447 // Are we loading from a region? 
This actually results in two loads; one 2448 // to fetch the address of the referenced value and one to fetch the 2449 // referenced value. 2450 if (const TypedValueRegion *TR = 2451 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2452 2453 QualType ValTy = TR->getValueType(); 2454 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2455 static SimpleProgramPointTag 2456 loadReferenceTag(TagProviderName, "Load Reference"); 2457 ExplodedNodeSet Tmp; 2458 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2459 location, &loadReferenceTag, 2460 getContext().getPointerType(RT->getPointeeType())); 2461 2462 // Perform the load from the referenced value. 2463 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2464 state = (*I)->getState(); 2465 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2466 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2467 } 2468 return; 2469 } 2470 } 2471 2472 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2473 } 2474 2475 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2476 const Expr *NodeEx, 2477 const Expr *BoundEx, 2478 ExplodedNode *Pred, 2479 ProgramStateRef state, 2480 SVal location, 2481 const ProgramPointTag *tag, 2482 QualType LoadTy) { 2483 assert(NodeEx); 2484 assert(BoundEx); 2485 // Evaluate the location (checks for bad dereferences). 2486 ExplodedNodeSet Tmp; 2487 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2488 if (Tmp.empty()) 2489 return; 2490 2491 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2492 if (location.isUndef()) 2493 return; 2494 2495 // Proceed with the load. 2496 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2497 state = (*NI)->getState(); 2498 const LocationContext *LCtx = (*NI)->getLocationContext(); 2499 2500 SVal V = UnknownVal(); 2501 if (location.isValid()) { 2502 if (LoadTy.isNull()) 2503 LoadTy = BoundEx->getType(); 2504 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2505 } 2506 2507 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2508 ProgramPoint::PostLoadKind); 2509 } 2510 } 2511 2512 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2513 const Stmt *NodeEx, 2514 const Stmt *BoundEx, 2515 ExplodedNode *Pred, 2516 ProgramStateRef state, 2517 SVal location, 2518 const ProgramPointTag *tag, 2519 bool isLoad) { 2520 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2521 // Early checks for performance reason. 2522 if (location.isUnknown()) { 2523 return; 2524 } 2525 2526 ExplodedNodeSet Src; 2527 BldrTop.takeNodes(Pred); 2528 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2529 if (Pred->getState() != state) { 2530 // Associate this new state with an ExplodedNode. 
2531 // FIXME: If I pass null tag, the graph is incorrect, e.g for 2532 // int *p; 2533 // p = 0; 2534 // *p = 0xDEADBEEF; 2535 // "p = 0" is not noted as "Null pointer value stored to 'p'" but 2536 // instead "int *p" is noted as 2537 // "Variable 'p' initialized to a null pointer value" 2538 2539 static SimpleProgramPointTag tag(TagProviderName, "Location"); 2540 Bldr.generateNode(NodeEx, Pred, state, &tag); 2541 } 2542 ExplodedNodeSet Tmp; 2543 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad, 2544 NodeEx, BoundEx, *this); 2545 BldrTop.addNodes(Tmp); 2546 } 2547 2548 std::pair<const ProgramPointTag *, const ProgramPointTag*> 2549 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() { 2550 static SimpleProgramPointTag 2551 eagerlyAssumeBinOpBifurcationTrue(TagProviderName, 2552 "Eagerly Assume True"), 2553 eagerlyAssumeBinOpBifurcationFalse(TagProviderName, 2554 "Eagerly Assume False"); 2555 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue, 2556 &eagerlyAssumeBinOpBifurcationFalse); 2557 } 2558 2559 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst, 2560 ExplodedNodeSet &Src, 2561 const Expr *Ex) { 2562 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx); 2563 2564 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) { 2565 ExplodedNode *Pred = *I; 2566 // Test if the previous node was as the same expression. This can happen 2567 // when the expression fails to evaluate to anything meaningful and 2568 // (as an optimization) we don't generate a node. 2569 ProgramPoint P = Pred->getLocation(); 2570 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2571 continue; 2572 } 2573 2574 ProgramStateRef state = Pred->getState(); 2575 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2576 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2577 if (SEV && SEV->isExpression()) { 2578 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2579 geteagerlyAssumeBinOpBifurcationTags(); 2580 2581 ProgramStateRef StateTrue, StateFalse; 2582 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2583 2584 // First assume that the condition is true. 2585 if (StateTrue) { 2586 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2587 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2588 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2589 } 2590 2591 // Next, assume that the condition is false. 2592 if (StateFalse) { 2593 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2594 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2595 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2596 } 2597 } 2598 } 2599 } 2600 2601 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2602 ExplodedNodeSet &Dst) { 2603 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2604 // We have processed both the inputs and the outputs. All of the outputs 2605 // should evaluate to Locs. Nuke all of their values. 2606 2607 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2608 // which interprets the inline asm and stores proper results in the 2609 // outputs. 2610 2611 ProgramStateRef state = Pred->getState(); 2612 2613 for (const Expr *O : A->outputs()) { 2614 SVal X = state->getSVal(O, Pred->getLocationContext()); 2615 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 
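// E.g. (illustrative) for "asm("" : "=r"(x));" the output 'x' evaluates to
// an lvalue Loc here and is rebound to UnknownVal below.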
2616 2617 if (Optional<Loc> LV = X.getAs<Loc>()) 2618 state = state->bindLoc(*LV, UnknownVal(), Pred->getLocationContext()); 2619 } 2620 2621 Bldr.generateNode(A, Pred, state); 2622 } 2623 2624 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2625 ExplodedNodeSet &Dst) { 2626 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2627 Bldr.generateNode(A, Pred, Pred->getState()); 2628 } 2629 2630 //===----------------------------------------------------------------------===// 2631 // Visualization. 2632 //===----------------------------------------------------------------------===// 2633 2634 #ifndef NDEBUG 2635 static ExprEngine* GraphPrintCheckerState; 2636 static SourceManager* GraphPrintSourceManager; 2637 2638 namespace llvm { 2639 template<> 2640 struct DOTGraphTraits<ExplodedNode*> : 2641 public DefaultDOTGraphTraits { 2642 2643 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2644 2645 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2646 // work. 2647 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2648 return ""; 2649 } 2650 2651 // De-duplicate some source location pretty-printing. 2652 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2653 if (SLoc.isFileID()) { 2654 Out << "\\lline=" 2655 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2656 << " col=" 2657 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2658 << "\\l"; 2659 } 2660 } 2661 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2662 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2663 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2664 else 2665 SLoc.print(Out, *GraphPrintSourceManager); 2666 } 2667 2668 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2669 2670 std::string sbuf; 2671 llvm::raw_string_ostream Out(sbuf); 2672 2673 // Program Location. 2674 ProgramPoint Loc = N->getLocation(); 2675 2676 switch (Loc.getKind()) { 2677 case ProgramPoint::BlockEntranceKind: { 2678 Out << "Block Entrance: B" 2679 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2680 break; 2681 } 2682 2683 case ProgramPoint::BlockExitKind: 2684 assert (false); 2685 break; 2686 2687 case ProgramPoint::CallEnterKind: 2688 Out << "CallEnter"; 2689 break; 2690 2691 case ProgramPoint::CallExitBeginKind: 2692 Out << "CallExitBegin"; 2693 break; 2694 2695 case ProgramPoint::CallExitEndKind: 2696 Out << "CallExitEnd"; 2697 break; 2698 2699 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2700 Out << "PostStmtPurgeDeadSymbols"; 2701 break; 2702 2703 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2704 Out << "PreStmtPurgeDeadSymbols"; 2705 break; 2706 2707 case ProgramPoint::EpsilonKind: 2708 Out << "Epsilon Point"; 2709 break; 2710 2711 case ProgramPoint::LoopExitKind: { 2712 LoopExit LE = Loc.castAs<LoopExit>(); 2713 Out << "LoopExit: " << LE.getLoopStmt()->getStmtClassName(); 2714 break; 2715 } 2716 2717 case ProgramPoint::PreImplicitCallKind: { 2718 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2719 Out << "PreCall: "; 2720 2721 // FIXME: Get proper printing options. 2722 PC.getDecl()->print(Out, LangOptions()); 2723 printLocation(Out, PC.getLocation()); 2724 break; 2725 } 2726 2727 case ProgramPoint::PostImplicitCallKind: { 2728 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2729 Out << "PostCall: "; 2730 2731 // FIXME: Get proper printing options. 
2732 PC.getDecl()->print(Out, LangOptions()); 2733 printLocation(Out, PC.getLocation()); 2734 break; 2735 } 2736 2737 case ProgramPoint::PostInitializerKind: { 2738 Out << "PostInitializer: "; 2739 const CXXCtorInitializer *Init = 2740 Loc.castAs<PostInitializer>().getInitializer(); 2741 if (const FieldDecl *FD = Init->getAnyMember()) 2742 Out << *FD; 2743 else { 2744 QualType Ty = Init->getTypeSourceInfo()->getType(); 2745 Ty = Ty.getLocalUnqualifiedType(); 2746 LangOptions LO; // FIXME. 2747 Ty.print(Out, LO); 2748 } 2749 break; 2750 } 2751 2752 case ProgramPoint::BlockEdgeKind: { 2753 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2754 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2755 << E.getDst()->getBlockID() << ')'; 2756 2757 if (const Stmt *T = E.getSrc()->getTerminator()) { 2758 SourceLocation SLoc = T->getLocStart(); 2759 2760 Out << "\\|Terminator: "; 2761 LangOptions LO; // FIXME. 2762 E.getSrc()->printTerminator(Out, LO); 2763 2764 if (SLoc.isFileID()) { 2765 Out << "\\lline=" 2766 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2767 << " col=" 2768 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2769 } 2770 2771 if (isa<SwitchStmt>(T)) { 2772 const Stmt *Label = E.getDst()->getLabel(); 2773 2774 if (Label) { 2775 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2776 Out << "\\lcase "; 2777 LangOptions LO; // FIXME. 2778 if (C->getLHS()) 2779 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2780 2781 if (const Stmt *RHS = C->getRHS()) { 2782 Out << " .. "; 2783 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2784 } 2785 2786 Out << ":"; 2787 } 2788 else { 2789 assert (isa<DefaultStmt>(Label)); 2790 Out << "\\ldefault:"; 2791 } 2792 } 2793 else 2794 Out << "\\l(implicit) default:"; 2795 } 2796 else if (isa<IndirectGotoStmt>(T)) { 2797 // FIXME 2798 } 2799 else { 2800 Out << "\\lCondition: "; 2801 if (*E.getSrc()->succ_begin() == E.getDst()) 2802 Out << "true"; 2803 else 2804 Out << "false"; 2805 } 2806 2807 Out << "\\l"; 2808 } 2809 2810 break; 2811 } 2812 2813 default: { 2814 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2815 assert(S != nullptr && "Expecting non-null Stmt"); 2816 2817 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2818 LangOptions LO; // FIXME. 2819 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2820 printLocation(Out, S->getLocStart()); 2821 2822 if (Loc.getAs<PreStmt>()) 2823 Out << "\\lPreStmt\\l;"; 2824 else if (Loc.getAs<PostLoad>()) 2825 Out << "\\lPostLoad\\l;"; 2826 else if (Loc.getAs<PostStore>()) 2827 Out << "\\lPostStore\\l"; 2828 else if (Loc.getAs<PostLValue>()) 2829 Out << "\\lPostLValue\\l"; 2830 2831 break; 2832 } 2833 } 2834 2835 ProgramStateRef state = N->getState(); 2836 Out << "\\|StateID: " << (const void*) state.get() 2837 << " NodeID: " << (const void*) N << "\\|"; 2838 2839 // Analysis stack backtrace. 2840 Out << "Location context stack (from current to outer):\\l"; 2841 const LocationContext *LC = Loc.getLocationContext(); 2842 unsigned Idx = 0; 2843 for (; LC; LC = LC->getParent(), ++Idx) { 2844 Out << Idx << ". 
(" << (const void *)LC << ") "; 2845 switch (LC->getKind()) { 2846 case LocationContext::StackFrame: 2847 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2848 Out << "Calling " << D->getQualifiedNameAsString(); 2849 else 2850 Out << "Calling anonymous code"; 2851 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2852 Out << " at "; 2853 printLocation2(Out, S->getLocStart()); 2854 } 2855 break; 2856 case LocationContext::Block: 2857 Out << "Invoking block"; 2858 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2859 Out << " defined at "; 2860 printLocation2(Out, D->getLocStart()); 2861 } 2862 break; 2863 case LocationContext::Scope: 2864 Out << "Entering scope"; 2865 // FIXME: Add more info once ScopeContext is activated. 2866 break; 2867 } 2868 Out << "\\l"; 2869 } 2870 Out << "\\l"; 2871 2872 state->printDOT(Out); 2873 2874 Out << "\\l"; 2875 2876 if (const ProgramPointTag *tag = Loc.getTag()) { 2877 Out << "\\|Tag: " << tag->getTagDescription(); 2878 Out << "\\l"; 2879 } 2880 return Out.str(); 2881 } 2882 }; 2883 } // end llvm namespace 2884 #endif 2885 2886 void ExprEngine::ViewGraph(bool trim) { 2887 #ifndef NDEBUG 2888 if (trim) { 2889 std::vector<const ExplodedNode*> Src; 2890 2891 // Flush any outstanding reports to make sure we cover all the nodes. 2892 // This does not cause them to get displayed. 2893 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2894 const_cast<BugType*>(*I)->FlushReports(BR); 2895 2896 // Iterate through the reports and get their nodes. 2897 for (BugReporter::EQClasses_iterator 2898 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2899 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2900 if (N) Src.push_back(N); 2901 } 2902 2903 ViewGraph(Src); 2904 } 2905 else { 2906 GraphPrintCheckerState = this; 2907 GraphPrintSourceManager = &getContext().getSourceManager(); 2908 2909 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2910 2911 GraphPrintCheckerState = nullptr; 2912 GraphPrintSourceManager = nullptr; 2913 } 2914 #endif 2915 } 2916 2917 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2918 #ifndef NDEBUG 2919 GraphPrintCheckerState = this; 2920 GraphPrintSourceManager = &getContext().getSourceManager(); 2921 2922 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2923 2924 if (!TrimmedG.get()) 2925 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2926 else 2927 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2928 2929 GraphPrintCheckerState = nullptr; 2930 GraphPrintSourceManager = nullptr; 2931 #endif 2932 } 2933