//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/Analysis/CFGStmtMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopWidening.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/LoopUnrolling.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
    : AMgr(mgr),
      AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
      Engine(*this, FS),
      G(Engine.getGraph()),
      StateMgr(getContext(), mgr.getStoreManagerCreator(),
               mgr.getConstraintManagerCreator(), G.getAllocator(),
               this),
      SymMgr(StateMgr.getSymbolManager()),
      svalBuilder(StateMgr.getSValBuilder()),
      currStmtIdx(0), currBldrCtx(nullptr),
      ObjCNoRet(mgr.getASTContext()),
      ObjCGCEnabled(gcEnabled), BR(mgr, *this),
      VisitedCallees(VisitedCalleesIn),
      HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
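      // For example, with this assumption a branch such as
      // 'if (this == nullptr)' inside a top-level analyzed method is
      // treated as infeasible along this path.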
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *InitWithAdjustments,
                                          const Expr *Result) {
  // FIXME: This function is a hack that works around the quirky AST
  // we're often having with respect to C++ temporaries. If only we modelled
  // the actual execution order of statements properly in the CFG,
  // all the hassle with adjustments would not be necessary,
  // and perhaps the whole function would be removed.
  SVal InitValWithAdjustments = State->getSVal(InitWithAdjustments, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!InitValWithAdjustments.getAs<NonLoc>())
      return State;
    Result = InitWithAdjustments;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!InitValWithAdjustments.getAs<Loc>() ||
           Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // MaterializeTemporaryExpr may appear out of place, after a few field and
  // base-class accesses have been made to the object, even though semantically
  // it is the whole object that gets materialized and lifetime-extended.
  //
  // For example:
  //
  //   `-MaterializeTemporaryExpr
  //     `-MemberExpr
  //       `-CXXTemporaryObjectExpr
  //
  // instead of the more natural
  //
  //   `-MemberExpr
  //     `-MaterializeTemporaryExpr
  //       `-CXXTemporaryObjectExpr
  //
  // Use the usual methods for obtaining the expression of the base object,
  // and record the adjustments that we need to make to obtain the sub-object
  // that the whole expression 'Ex' refers to. This trick is usual,
  // in the sense that CodeGen takes a similar route.

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;

  const Expr *Init = InitWithAdjustments->skipRValueSubobjectAdjustments(
      CommaLHSs, Adjustments);

  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Init);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Init, LC);

  SVal Reg = loc::MemRegionVal(TR);
  SVal BaseReg = Reg;

  // Make the necessary adjustments to obtain the sub-object.
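  // For instance, given the 'MemberExpr over CXXTemporaryObjectExpr' shape
  // shown above, a single FieldAdjustment would typically be recorded, and
  // applying it below narrows 'Reg' from the whole temporary region down to
  // the member's sub-region.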
  for (auto I = Adjustments.rbegin(), E = Adjustments.rend(); I != E; ++I) {
    const SubobjectAdjustment &Adj = *I;
    switch (Adj.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Reg = StoreMgr.evalDerivedToBase(Reg, Adj.DerivedToBase.BasePath);
      break;
    case SubobjectAdjustment::FieldAdjustment:
      Reg = StoreMgr.getLValueField(Adj.Field, Reg);
      break;
    case SubobjectAdjustment::MemberPointerAdjustment:
      // FIXME: Unimplemented.
      State = State->bindDefault(Reg, UnknownVal(), LC);
      return State;
    }
  }

  // What remains is to copy the value of the object to the new region.
  // FIXME: In other words, what we should always do is copy the value of the
  // Init expression (which corresponds to the bigger object) to the whole
  // temporary region TR. However, this value is often no longer present
  // in the Environment. If it has disappeared, we instead invalidate TR.
  // Still, what we can do is assign the value of expression Ex (which
  // corresponds to the sub-object) to the TR's sub-region Reg. At least,
  // values inside Reg would be correct.
  SVal InitVal = State->getSVal(Init, LC);
  if (InitVal.isUnknown()) {
    InitVal = getSValBuilder().conjureSymbolVal(Result, LC, Init->getType(),
                                                currBldrCtx->blockCount());
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);

    // Then we'd need to take the value that certainly exists and bind it over.
    if (InitValWithAdjustments.isUnknown()) {
      // Try to recover some path sensitivity in case we couldn't
      // compute the value.
      InitValWithAdjustments = getSValBuilder().conjureSymbolVal(
          Result, LC, InitWithAdjustments->getType(),
          currBldrCtx->blockCount());
    }
    State =
        State->bindLoc(Reg.castAs<Loc>(), InitValWithAdjustments, LC, false);
  } else {
    State = State->bindLoc(BaseReg.castAs<Loc>(), InitVal, LC, false);
  }

  // The result expression would now point to the correct sub-region of the
  // newly created temporary region. Do this last in order to getSVal of Init
  // correctly in case (Result == Init).
  State = State->BindExpr(Result, LC, Reg);

  // Notify checkers once for two bindLoc()s.
  State = processRegionChange(State, TR, LC);

  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
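/// For example, when the engine assumes 'p != 0' along a branch, checkers are
/// given the opportunity here to update their own portion of the state to
/// reflect that assumption.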
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const LocationContext *LCtx,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         LCtx, Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
    case CFGElement::LifetimeEnds:
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
          I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    if (auto *CtorExpr = findDirectConstructorForCurrentCFGElement()) {
      assert(BMI->getInit()->IgnoreImplicit() == CtorExpr);
      (void)CtorExpr;
      // The field was directly constructed, so there is no need to bind.
    } else {
      const Expr *Init = BMI->getInit()->IgnoreImplicit();
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (Init->getType()->isArrayType()) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (!Field->getType()->isReferenceType())
          if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
            InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
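  // (Even when no binding was performed above, e.g. for base or delegating
  // initializers, the PostInitializer program point below still records that
  // this initializer was processed on the current path.)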
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (varType->isReferenceType()) {
    const MemRegion *ValueRegion = state->getSVal(Region).getAsRegion();
    if (!ValueRegion) {
      // FIXME: This should not happen. The language guarantees a presence
      // of a valid initializer here, so the reference shall not be undefined.
      // It seems that we're calling destructors over variables that
      // were not initialized yet.
      return;
    }
    Region = ValueRegion->getBaseRegion();
    varType = cast<TypedValueRegion>(Region)->getValueType();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++, OpenMP and ARC stuff we don't support yet.
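    // For all of these we conservatively generate a sink node and abort the
    // rest of the block (see the generateSink call at the end of this group).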
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXInheritedCtorInitExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::MSPropertySubscriptExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::DependentCoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTargetEnterDataDirectiveClass:
    case Stmt::OMPTargetExitDataDirectiveClass:
    case Stmt::OMPTargetParallelDirectiveClass:
    case Stmt::OMPTargetParallelForDirectiveClass:
    case Stmt::OMPTargetUpdateDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
    case Stmt::OMPTaskLoopDirectiveClass:
    case Stmt::OMPTaskLoopSimdDirectiveClass:
    case Stmt::OMPDistributeDirectiveClass:
    case Stmt::OMPDistributeParallelForDirectiveClass:
    case Stmt::OMPDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPDistributeSimdDirectiveClass:
    case Stmt::OMPTargetParallelForSimdDirectiveClass:
    case Stmt::OMPTargetSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeDirectiveClass:
    case Stmt::OMPTeamsDistributeSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeParallelForSimdDirectiveClass:
    case Stmt::OMPTargetTeamsDistributeSimdDirectiveClass:
    case Stmt::CapturedStmtClass:
    {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ArrayInitLoopExprClass:
    case Stmt::ArrayInitIndexExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
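    // For these we only run the pre- and post-statement checker callbacks
    // below; no new binding is created here.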
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::ObjCAvailabilityCheckExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
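    // That is, a fresh symbolic value of the expression's result type is
    // conjured and bound to the expression below.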
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
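      // (For an overloaded member operator such as 'a << b', the object 'a'
      // is passed as the first argument of the CXXOperatorCallExpr.)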
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
              createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
      LLVM_FALLTHROUGH;
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix
    // the CFG to not model these as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      ExplodedNodeSet dstExpr;
      VisitCast(C, C->getSubExpr(), Pred, dstExpr);

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, MTE, *this);
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        CreateCXXTemporaryObject(MTE, *i, dstExpr);
      }
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, MTE, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::AtomicExprClass:
      Bldr.takeNodes(Pred);
      VisitAtomicExpr(cast<AtomicExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
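      // Generating a sink node here ends exploration of this path instead of
      // modeling unwinding to a matching handler.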
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance.  (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  // If we reach a loop which has a known bound (and meets
  // other constraints) then consider completely unrolling it.
  if (AMgr.options.shouldUnrollLoops()) {
    const CFGBlock *ActualBlock = nodeBuilder.getContext().getBlock();
    const Stmt *Term = ActualBlock->getTerminator();
    if (Term && shouldCompletelyUnroll(Term, AMgr.getASTContext())) {
      ProgramStateRef UnrolledState = markLoopAsUnrolled(
          Term, Pred->getState(),
          cast<FunctionDecl>(Pred->getStackFrame()->getDecl()));
      if (UnrolledState != Pred->getState())
        nodeBuilder.generateNode(UnrolledState, Pred);
      return;
    }

    if (ActualBlock->empty())
      return;

    if (isUnrolledLoopBlock(ActualBlock, Pred, AMgr))
      return;
  }

  // If this block is terminated by a loop and it has already been visited the
  // maximum number of times, widen the loop.
  unsigned int BlockCount = nodeBuilder.getContext().blockCount();
  if (BlockCount == AMgr.options.maxBlockVisitOnPath - 1 &&
      AMgr.options.shouldWidenLoops()) {
    const Stmt *Term = nodeBuilder.getContext().getBlock()->getTerminator();
    if (!(Term &&
          (isa<ForStmt>(Term) || isa<WhileStmt>(Term) || isa<DoStmt>(Term))))
      return;
    // Widen.
    const LocationContext *LCtx = Pred->getLocationContext();
    ProgramStateRef WidenedState =
        getWidenedLoopState(Pred->getState(), LCtx, BlockCount, Term);
    nodeBuilder.generateNode(WidenedState, Pred);
    return;
  }

  // FIXME: Refactor this into a checker.
  if (BlockCount >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
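    // If the budget ran out while still inside an inlined call, the code
    // below prefers to retry that call without inlining rather than giving
    // up on the path outright.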
1548 const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
1549 const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
1550 const LocationContext *RootLC =
1551 (*G.roots_begin())->getLocation().getLocationContext();
1552 if (RootLC->getCurrentStackFrame() != CalleeSF) {
1553 Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());
1554 
1555 // Re-run the call evaluation without inlining it, by storing the
1556 // no-inlining policy in the state and enqueuing the new work item on
1557 // the list. Replay should almost never fail. Use the stats to catch it
1558 // if it does.
1559 if ((!AMgr.options.NoRetryExhausted &&
1560 replayWithoutInlining(Pred, CalleeLC)))
1561 return;
1562 NumMaxBlockCountReachedInInlined++;
1563 } else
1564 NumMaxBlockCountReached++;
1565 
1566 // Mark sink nodes as exhausted (for stats) only if the retry failed.
1567 Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
1568 }
1569 }
1570 
1571 //===----------------------------------------------------------------------===//
1572 // Branch processing.
1573 //===----------------------------------------------------------------------===//
1574 
1575 /// RecoverCastedSymbol - A helper function for ProcessBranch that is used
1576 /// to try to recover some path-sensitivity for casts of symbolic
1577 /// integers that promote their values (which are currently not tracked well).
1578 /// This function returns the SVal bound to Condition->IgnoreCasts if all the
1579 /// cast(s) did was sign-extend the original value.
1580 static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
1581 ProgramStateRef state,
1582 const Stmt *Condition,
1583 const LocationContext *LCtx,
1584 ASTContext &Ctx) {
1585 
1586 const Expr *Ex = dyn_cast<Expr>(Condition);
1587 if (!Ex)
1588 return UnknownVal();
1589 
1590 uint64_t bits = 0;
1591 bool bitsInit = false;
1592 
1593 while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
1594 QualType T = CE->getType();
1595 
1596 if (!T->isIntegralOrEnumerationType())
1597 return UnknownVal();
1598 
1599 uint64_t newBits = Ctx.getTypeSize(T);
1600 if (!bitsInit || newBits < bits) {
1601 bitsInit = true;
1602 bits = newBits;
1603 }
1604 
1605 Ex = CE->getSubExpr();
1606 }
1607 
1608 // We reached a non-cast. Is it a symbolic value?
1609 QualType T = Ex->getType();
1610 
1611 if (!bitsInit || !T->isIntegralOrEnumerationType() ||
1612 Ctx.getTypeSize(T) > bits)
1613 return UnknownVal();
1614 
1615 return state->getSVal(Ex, LCtx);
1616 }
1617 
1618 #ifndef NDEBUG
1619 static const Stmt *getRightmostLeaf(const Stmt *Condition) {
1620 while (Condition) {
1621 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
1622 if (!BO || !BO->isLogicalOp()) {
1623 return Condition;
1624 }
1625 Condition = BO->getRHS()->IgnoreParens();
1626 }
1627 return nullptr;
1628 }
1629 #endif
1630 
1631 // Returns the condition the branch at the end of 'B' depends on and whose value
1632 // has been evaluated within 'B'.
1633 // In most cases, the terminator condition of 'B' will be evaluated fully in
1634 // the last statement of 'B'; in those cases, the resolved condition is the
1635 // given 'Condition'.
1636 // If the condition of the branch is a logical binary operator tree, the CFG is 1637 // optimized: in that case, we know that the expression formed by all but the 1638 // rightmost leaf of the logical binary operator tree must be true, and thus 1639 // the branch condition is at this point equivalent to the truth value of that 1640 // rightmost leaf; the CFG block thus only evaluates this rightmost leaf 1641 // expression in its final statement. As the full condition in that case was 1642 // not evaluated, and is thus not in the SVal cache, we need to use that leaf 1643 // expression to evaluate the truth value of the condition in the current state 1644 // space. 1645 static const Stmt *ResolveCondition(const Stmt *Condition, 1646 const CFGBlock *B) { 1647 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1648 Condition = Ex->IgnoreParens(); 1649 1650 const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition); 1651 if (!BO || !BO->isLogicalOp()) 1652 return Condition; 1653 1654 assert(!B->getTerminator().isTemporaryDtorsBranch() && 1655 "Temporary destructor branches handled by processBindTemporary."); 1656 1657 // For logical operations, we still have the case where some branches 1658 // use the traditional "merge" approach and others sink the branch 1659 // directly into the basic blocks representing the logical operation. 1660 // We need to distinguish between those two cases here. 1661 1662 // The invariants are still shifting, but it is possible that the 1663 // last element in a CFGBlock is not a CFGStmt. Look for the last 1664 // CFGStmt as the value of the condition. 1665 CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend(); 1666 for (; I != E; ++I) { 1667 CFGElement Elem = *I; 1668 Optional<CFGStmt> CS = Elem.getAs<CFGStmt>(); 1669 if (!CS) 1670 continue; 1671 const Stmt *LastStmt = CS->getStmt(); 1672 assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition)); 1673 return LastStmt; 1674 } 1675 llvm_unreachable("could not resolve condition"); 1676 } 1677 1678 void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term, 1679 NodeBuilderContext& BldCtx, 1680 ExplodedNode *Pred, 1681 ExplodedNodeSet &Dst, 1682 const CFGBlock *DstT, 1683 const CFGBlock *DstF) { 1684 assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) && 1685 "CXXBindTemporaryExprs are handled by processBindTemporary."); 1686 const LocationContext *LCtx = Pred->getLocationContext(); 1687 PrettyStackTraceLocationContext StackCrashInfo(LCtx); 1688 currBldrCtx = &BldCtx; 1689 1690 // Check for NULL conditions; e.g. "for(;;)" 1691 if (!Condition) { 1692 BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF); 1693 NullCondBldr.markInfeasible(false); 1694 NullCondBldr.generateNode(Pred->getState(), true, Pred); 1695 return; 1696 } 1697 1698 if (const Expr *Ex = dyn_cast<Expr>(Condition)) 1699 Condition = Ex->IgnoreParens(); 1700 1701 Condition = ResolveCondition(Condition, BldCtx.getBlock()); 1702 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 1703 Condition->getLocStart(), 1704 "Error evaluating branch"); 1705 1706 ExplodedNodeSet CheckersOutSet; 1707 getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet, 1708 Pred, *this); 1709 // We generated only sinks. 
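// (If so, CheckersOutSet is empty and we bail out below.)
//
// Otherwise we branch on the resolved condition. Note that 'Condition' may
// by now be only the rightmost leaf of the original condition (see
// ResolveCondition above); e.g. for
//
//   if (x > 0 && y > 0) { /* ... */ }
//
// the CFG evaluated 'x > 0' in an earlier block, so this block branches on
// the value bound to 'y > 0' alone.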
1710 if (CheckersOutSet.empty()) 1711 return; 1712 1713 BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF); 1714 for (NodeBuilder::iterator I = CheckersOutSet.begin(), 1715 E = CheckersOutSet.end(); E != I; ++I) { 1716 ExplodedNode *PredI = *I; 1717 1718 if (PredI->isSink()) 1719 continue; 1720 1721 ProgramStateRef PrevState = PredI->getState(); 1722 SVal X = PrevState->getSVal(Condition, PredI->getLocationContext()); 1723 1724 if (X.isUnknownOrUndef()) { 1725 // Give it a chance to recover from unknown. 1726 if (const Expr *Ex = dyn_cast<Expr>(Condition)) { 1727 if (Ex->getType()->isIntegralOrEnumerationType()) { 1728 // Try to recover some path-sensitivity. Right now casts of symbolic 1729 // integers that promote their values are currently not tracked well. 1730 // If 'Condition' is such an expression, try and recover the 1731 // underlying value and use that instead. 1732 SVal recovered = RecoverCastedSymbol(getStateManager(), 1733 PrevState, Condition, 1734 PredI->getLocationContext(), 1735 getContext()); 1736 1737 if (!recovered.isUnknown()) { 1738 X = recovered; 1739 } 1740 } 1741 } 1742 } 1743 1744 // If the condition is still unknown, give up. 1745 if (X.isUnknownOrUndef()) { 1746 builder.generateNode(PrevState, true, PredI); 1747 builder.generateNode(PrevState, false, PredI); 1748 continue; 1749 } 1750 1751 DefinedSVal V = X.castAs<DefinedSVal>(); 1752 1753 ProgramStateRef StTrue, StFalse; 1754 std::tie(StTrue, StFalse) = PrevState->assume(V); 1755 1756 // Process the true branch. 1757 if (builder.isFeasible(true)) { 1758 if (StTrue) 1759 builder.generateNode(StTrue, true, PredI); 1760 else 1761 builder.markInfeasible(true); 1762 } 1763 1764 // Process the false branch. 1765 if (builder.isFeasible(false)) { 1766 if (StFalse) 1767 builder.generateNode(StFalse, false, PredI); 1768 else 1769 builder.markInfeasible(false); 1770 } 1771 } 1772 currBldrCtx = nullptr; 1773 } 1774 1775 /// The GDM component containing the set of global variables which have been 1776 /// previously initialized with explicit initializers. 1777 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1778 llvm::ImmutableSet<const VarDecl *>) 1779 1780 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1781 NodeBuilderContext &BuilderCtx, 1782 ExplodedNode *Pred, 1783 clang::ento::ExplodedNodeSet &Dst, 1784 const CFGBlock *DstT, 1785 const CFGBlock *DstF) { 1786 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1787 currBldrCtx = &BuilderCtx; 1788 1789 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1790 ProgramStateRef state = Pred->getState(); 1791 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1792 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1793 1794 if (!initHasRun) { 1795 state = state->add<InitializedGlobalsSet>(VD); 1796 } 1797 1798 builder.generateNode(state, initHasRun, Pred); 1799 builder.markInfeasible(!initHasRun); 1800 1801 currBldrCtx = nullptr; 1802 } 1803 1804 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1805 /// nodes by processing the 'effects' of a computed goto jump. 1806 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1807 1808 ProgramStateRef state = builder.getState(); 1809 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1810 1811 // Three possibilities: 1812 // 1813 // (1) We know the computed label. 1814 // (2) The label is NULL (or some other constant), or Undefined. 1815 // (3) We have no clue about the label. 
Dispatch to all targets. 1816 // 1817 1818 typedef IndirectGotoNodeBuilder::iterator iterator; 1819 1820 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1821 const LabelDecl *L = LV->getLabel(); 1822 1823 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1824 if (I.getLabel() == L) { 1825 builder.generateNode(I, state); 1826 return; 1827 } 1828 } 1829 1830 llvm_unreachable("No block with label."); 1831 } 1832 1833 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1834 // Dispatch to the first target and mark it as a sink. 1835 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1836 // FIXME: add checker visit. 1837 // UndefBranches.insert(N); 1838 return; 1839 } 1840 1841 // This is really a catch-all. We don't support symbolics yet. 1842 // FIXME: Implement dispatch for symbolic pointers. 1843 1844 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1845 builder.generateNode(I, state); 1846 } 1847 1848 #if 0 1849 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1850 const StackFrameContext* Frame = Pred.getStackFrame(); 1851 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1852 Pred.getState()->get<InitializedTemporariesSet>(); 1853 return std::find_if(Set.begin(), Set.end(), 1854 [&](const CXXBindTemporaryContext &Ctx) { 1855 if (Ctx.second == Frame) { 1856 Ctx.first->dump(); 1857 llvm::errs() << "\n"; 1858 } 1859 return Ctx.second == Frame; 1860 }) == Set.end(); 1861 } 1862 #endif 1863 1864 void ExprEngine::processBeginOfFunction(NodeBuilderContext &BC, 1865 ExplodedNode *Pred, 1866 ExplodedNodeSet &Dst, 1867 const BlockEdge &L) { 1868 SaveAndRestore<const NodeBuilderContext *> NodeContextRAII(currBldrCtx, &BC); 1869 getCheckerManager().runCheckersForBeginFunction(Dst, L, Pred, *this); 1870 } 1871 1872 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1873 /// nodes when the control reaches the end of a function. 1874 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1875 ExplodedNode *Pred, 1876 const ReturnStmt *RS) { 1877 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1878 // We currently cannot enable this assert, as lifetime extended temporaries 1879 // are not modelled correctly. 1880 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1881 StateMgr.EndPath(Pred->getState()); 1882 1883 ExplodedNodeSet Dst; 1884 if (Pred->getLocationContext()->inTopFrame()) { 1885 // Remove dead symbols. 1886 ExplodedNodeSet AfterRemovedDead; 1887 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1888 1889 // Notify checkers. 1890 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1891 E = AfterRemovedDead.end(); I != E; ++I) { 1892 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1893 } 1894 } else { 1895 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1896 } 1897 1898 Engine.enqueueEndOfFunction(Dst, RS); 1899 } 1900 1901 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1902 /// nodes by processing the 'effects' of a switch statement. 
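/// For example, for
///
///   switch (x) {
///   case 1:       /* ... */ break;
///   case 3 ... 5: /* ... */ break;   // GNU case range
///   default:      /* ... */ break;
///   }
///
/// each reachable 'case' successor receives a state in which the condition
/// is assumed to lie in the corresponding [LHS, RHS] range, while the
/// 'default' successor receives the state in which all of those assumptions
/// failed, and is generated only if that state is still feasible.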
1903 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1904 typedef SwitchNodeBuilder::iterator iterator; 1905 ProgramStateRef state = builder.getState(); 1906 const Expr *CondE = builder.getCondition(); 1907 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1908 1909 if (CondV_untested.isUndef()) { 1910 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1911 // FIXME: add checker 1912 //UndefBranches.insert(N); 1913 1914 return; 1915 } 1916 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1917 1918 ProgramStateRef DefaultSt = state; 1919 1920 iterator I = builder.begin(), EI = builder.end(); 1921 bool defaultIsFeasible = I == EI; 1922 1923 for ( ; I != EI; ++I) { 1924 // Successor may be pruned out during CFG construction. 1925 if (!I.getBlock()) 1926 continue; 1927 1928 const CaseStmt *Case = I.getCase(); 1929 1930 // Evaluate the LHS of the case value. 1931 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1932 assert(V1.getBitWidth() == getContext().getIntWidth(CondE->getType())); 1933 1934 // Get the RHS of the case, if it exists. 1935 llvm::APSInt V2; 1936 if (const Expr *E = Case->getRHS()) 1937 V2 = E->EvaluateKnownConstInt(getContext()); 1938 else 1939 V2 = V1; 1940 1941 ProgramStateRef StateCase; 1942 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1943 std::tie(StateCase, DefaultSt) = 1944 DefaultSt->assumeInclusiveRange(*NL, V1, V2); 1945 else // UnknownVal 1946 StateCase = DefaultSt; 1947 1948 if (StateCase) 1949 builder.generateCaseStmtNode(I, StateCase); 1950 1951 // Now "assume" that the case doesn't match. Add this state 1952 // to the default state (if it is feasible). 1953 if (DefaultSt) 1954 defaultIsFeasible = true; 1955 else { 1956 defaultIsFeasible = false; 1957 break; 1958 } 1959 } 1960 1961 if (!defaultIsFeasible) 1962 return; 1963 1964 // If we have switch(enum value), the default branch is not 1965 // feasible if all of the enum constants not covered by 'case:' statements 1966 // are not feasible values for the switch condition. 1967 // 1968 // Note that this isn't as accurate as it could be. Even if there isn't 1969 // a case for a particular enum value as long as that enum value isn't 1970 // feasible then it shouldn't be considered for making 'default:' reachable. 1971 const SwitchStmt *SS = builder.getSwitch(); 1972 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1973 if (CondExpr->getType()->getAs<EnumType>()) { 1974 if (SS->isAllEnumCasesCovered()) 1975 return; 1976 } 1977 1978 builder.generateDefaultCaseNode(DefaultSt); 1979 } 1980 1981 //===----------------------------------------------------------------------===// 1982 // Transfer functions: Loads and stores. 1983 //===----------------------------------------------------------------------===// 1984 1985 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1986 ExplodedNode *Pred, 1987 ExplodedNodeSet &Dst) { 1988 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1989 1990 ProgramStateRef state = Pred->getState(); 1991 const LocationContext *LCtx = Pred->getLocationContext(); 1992 1993 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1994 // C permits "extern void v", and if you cast the address to a valid type, 1995 // you can even do things with it. We simply pretend 1996 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1997 const LocationContext *LocCtxt = Pred->getLocationContext(); 1998 const Decl *D = LocCtxt->getDecl(); 1999 const auto *MD = D ? 
dyn_cast<CXXMethodDecl>(D) : nullptr; 2000 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 2001 SVal V; 2002 bool IsReference; 2003 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 2004 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 2005 MD->getParent()->isLambda()) { 2006 // Lookup the field of the lambda. 2007 const CXXRecordDecl *CXXRec = MD->getParent(); 2008 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 2009 FieldDecl *LambdaThisCaptureField; 2010 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 2011 const FieldDecl *FD = LambdaCaptureFields[VD]; 2012 if (!FD) { 2013 // When a constant is captured, sometimes no corresponding field is 2014 // created in the lambda object. 2015 assert(VD->getType().isConstQualified()); 2016 V = state->getLValue(VD, LocCtxt); 2017 IsReference = false; 2018 } else { 2019 Loc CXXThis = 2020 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 2021 SVal CXXThisVal = state->getSVal(CXXThis); 2022 V = state->getLValue(FD, CXXThisVal); 2023 IsReference = FD->getType()->isReferenceType(); 2024 } 2025 } else { 2026 V = state->getLValue(VD, LocCtxt); 2027 IsReference = VD->getType()->isReferenceType(); 2028 } 2029 2030 // For references, the 'lvalue' is the pointer address stored in the 2031 // reference region. 2032 if (IsReference) { 2033 if (const MemRegion *R = V.getAsRegion()) 2034 V = state->getSVal(R); 2035 else 2036 V = UnknownVal(); 2037 } 2038 2039 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2040 ProgramPoint::PostLValueKind); 2041 return; 2042 } 2043 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 2044 assert(!Ex->isGLValue()); 2045 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 2046 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 2047 return; 2048 } 2049 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 2050 SVal V = svalBuilder.getFunctionPointer(FD); 2051 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2052 ProgramPoint::PostLValueKind); 2053 return; 2054 } 2055 if (isa<FieldDecl>(D)) { 2056 // FIXME: Compute lvalue of field pointers-to-member. 2057 // Right now we just use a non-null void pointer, so that it gives proper 2058 // results in boolean contexts. 
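// For example, in
//
//   struct S { int m; };
//   void f() { if (&S::m) { /* ... */ } }
//
// the reference to 'm' names a FieldDecl; we conjure a fresh void-pointer
// symbol for it and assume it is non-null, so the branch above takes the
// 'true' path.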
2059 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 2060 currBldrCtx->blockCount()); 2061 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 2062 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 2063 ProgramPoint::PostLValueKind); 2064 return; 2065 } 2066 2067 llvm_unreachable("Support for this Decl not implemented."); 2068 } 2069 2070 /// VisitArraySubscriptExpr - Transfer function for array accesses 2071 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 2072 ExplodedNode *Pred, 2073 ExplodedNodeSet &Dst){ 2074 2075 const Expr *Base = A->getBase()->IgnoreParens(); 2076 const Expr *Idx = A->getIdx()->IgnoreParens(); 2077 2078 ExplodedNodeSet CheckerPreStmt; 2079 getCheckerManager().runCheckersForPreStmt(CheckerPreStmt, Pred, A, *this); 2080 2081 ExplodedNodeSet EvalSet; 2082 StmtNodeBuilder Bldr(CheckerPreStmt, EvalSet, *currBldrCtx); 2083 assert(A->isGLValue() || 2084 (!AMgr.getLangOpts().CPlusPlus && 2085 A->getType().isCForbiddenLValueType())); 2086 2087 for (auto *Node : CheckerPreStmt) { 2088 const LocationContext *LCtx = Node->getLocationContext(); 2089 ProgramStateRef state = Node->getState(); 2090 SVal V = state->getLValue(A->getType(), 2091 state->getSVal(Idx, LCtx), 2092 state->getSVal(Base, LCtx)); 2093 Bldr.generateNode(A, Node, state->BindExpr(A, LCtx, V), nullptr, 2094 ProgramPoint::PostLValueKind); 2095 } 2096 2097 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, A, *this); 2098 } 2099 2100 /// VisitMemberExpr - Transfer function for member expressions. 2101 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 2102 ExplodedNodeSet &Dst) { 2103 2104 // FIXME: Prechecks eventually go in ::Visit(). 2105 ExplodedNodeSet CheckedSet; 2106 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this); 2107 2108 ExplodedNodeSet EvalSet; 2109 ValueDecl *Member = M->getMemberDecl(); 2110 2111 // Handle static member variables and enum constants accessed via 2112 // member syntax. 2113 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) { 2114 ExplodedNodeSet Dst; 2115 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2116 I != E; ++I) { 2117 VisitCommonDeclRefExpr(M, Member, Pred, EvalSet); 2118 } 2119 } else { 2120 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); 2121 ExplodedNodeSet Tmp; 2122 2123 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2124 I != E; ++I) { 2125 ProgramStateRef state = (*I)->getState(); 2126 const LocationContext *LCtx = (*I)->getLocationContext(); 2127 Expr *BaseExpr = M->getBase(); 2128 2129 // Handle C++ method calls. 2130 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) { 2131 if (MD->isInstance()) 2132 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2133 2134 SVal MDVal = svalBuilder.getFunctionPointer(MD); 2135 state = state->BindExpr(M, LCtx, MDVal); 2136 2137 Bldr.generateNode(M, *I, state); 2138 continue; 2139 } 2140 2141 // Handle regular struct fields / member variables. 2142 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 2143 SVal baseExprVal = state->getSVal(BaseExpr, LCtx); 2144 2145 FieldDecl *field = cast<FieldDecl>(Member); 2146 SVal L = state->getLValue(field, baseExprVal); 2147 2148 if (M->isGLValue() || M->getType()->isArrayType()) { 2149 // We special-case rvalues of array type because the analyzer cannot 2150 // reason about them, since we expect all regions to be wrapped in Locs. 
2151 // We instead treat these as lvalues and assume that they will decay to
2152 // pointers as soon as they are used.
2153 if (!M->isGLValue()) {
2154 assert(M->getType()->isArrayType());
2155 const ImplicitCastExpr *PE =
2156 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParentIgnoreParens(M));
2157 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
2158 llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
2159 }
2160 }
2161 
2162 if (field->getType()->isReferenceType()) {
2163 if (const MemRegion *R = L.getAsRegion())
2164 L = state->getSVal(R);
2165 else
2166 L = UnknownVal();
2167 }
2168 
2169 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
2170 ProgramPoint::PostLValueKind);
2171 } else {
2172 Bldr.takeNodes(*I);
2173 evalLoad(Tmp, M, M, *I, state, L);
2174 Bldr.addNodes(Tmp);
2175 }
2176 }
2177 }
2178 
2179 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
2180 }
2181 
2182 void ExprEngine::VisitAtomicExpr(const AtomicExpr *AE, ExplodedNode *Pred,
2183 ExplodedNodeSet &Dst) {
2184 ExplodedNodeSet AfterPreSet;
2185 getCheckerManager().runCheckersForPreStmt(AfterPreSet, Pred, AE, *this);
2186 
2187 // For now, treat all the arguments to C11 atomics as escaping.
2188 // FIXME: Ideally we should model the behavior of the atomics precisely here.
2189 
2190 ExplodedNodeSet AfterInvalidateSet;
2191 StmtNodeBuilder Bldr(AfterPreSet, AfterInvalidateSet, *currBldrCtx);
2192 
2193 for (ExplodedNodeSet::iterator I = AfterPreSet.begin(), E = AfterPreSet.end();
2194 I != E; ++I) {
2195 ProgramStateRef State = (*I)->getState();
2196 const LocationContext *LCtx = (*I)->getLocationContext();
2197 
2198 SmallVector<SVal, 8> ValuesToInvalidate;
2199 for (unsigned SI = 0, Count = AE->getNumSubExprs(); SI != Count; SI++) {
2200 const Expr *SubExpr = AE->getSubExprs()[SI];
2201 SVal SubExprVal = State->getSVal(SubExpr, LCtx);
2202 ValuesToInvalidate.push_back(SubExprVal);
2203 }
2204 
2205 State = State->invalidateRegions(ValuesToInvalidate, AE,
2206 currBldrCtx->blockCount(),
2207 LCtx,
2208 /*CausedByPointerEscape*/true,
2209 /*Symbols=*/nullptr);
2210 
2211 SVal ResultVal = UnknownVal();
2212 State = State->BindExpr(AE, LCtx, ResultVal);
2213 Bldr.generateNode(AE, *I, State, nullptr,
2214 ProgramPoint::PostStmtKind);
2215 }
2216 
2217 getCheckerManager().runCheckersForPostStmt(Dst, AfterInvalidateSet, AE, *this);
2218 }
2219 
2220 namespace {
2221 class CollectReachableSymbolsCallback final : public SymbolVisitor {
2222 InvalidatedSymbols Symbols;
2223 
2224 public:
2225 CollectReachableSymbolsCallback(ProgramStateRef State) {}
2226 const InvalidatedSymbols &getSymbols() const { return Symbols; }
2227 
2228 bool VisitSymbol(SymbolRef Sym) override {
2229 Symbols.insert(Sym);
2230 return true;
2231 }
2232 };
2233 } // end anonymous namespace
2234 
2235 // A value escapes in three possible cases:
2236 // (1) We are binding to something that is not a memory region.
2237 // (2) We are binding to a MemRegion that does not have stack storage.
2238 // (3) We are binding to a MemRegion with stack storage that the store
2239 // does not understand.
2240 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
2241 SVal Loc,
2242 SVal Val,
2243 const LocationContext *LCtx) {
2244 // Are we storing to something that causes the value to "escape"?
2245 bool escapes = true;
2246 
2247 // TODO: Move to StoreManager.
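// For example, in
//
//   int *global;
//   void publish(int *p) { global = p; }
//
// the bind of 'p' into 'global' targets a region without stack storage, so
// every symbol reachable from the bound value is reported to the checkers as
// escaping (PSK_EscapeOnBind); e.g. a malloc'ed pointer stored this way is
// no longer tracked as a potential leak.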
2248 if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) { 2249 escapes = !regionLoc->getRegion()->hasStackStorage(); 2250 2251 if (!escapes) { 2252 // To test (3), generate a new state with the binding added. If it is 2253 // the same state, then it escapes (since the store cannot represent 2254 // the binding). 2255 // Do this only if we know that the store is not supposed to generate the 2256 // same state. 2257 SVal StoredVal = State->getSVal(regionLoc->getRegion()); 2258 if (StoredVal != Val) 2259 escapes = (State == (State->bindLoc(*regionLoc, Val, LCtx))); 2260 } 2261 } 2262 2263 // If our store can represent the binding and we aren't storing to something 2264 // that doesn't have local storage then just return and have the simulation 2265 // state continue as is. 2266 if (!escapes) 2267 return State; 2268 2269 // Otherwise, find all symbols referenced by 'val' that we are tracking 2270 // and stop tracking them. 2271 CollectReachableSymbolsCallback Scanner = 2272 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2273 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2274 State = getCheckerManager().runCheckersForPointerEscape(State, 2275 EscapedSymbols, 2276 /*CallEvent*/ nullptr, 2277 PSK_EscapeOnBind, 2278 nullptr); 2279 2280 return State; 2281 } 2282 2283 ProgramStateRef 2284 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2285 const InvalidatedSymbols *Invalidated, 2286 ArrayRef<const MemRegion *> ExplicitRegions, 2287 ArrayRef<const MemRegion *> Regions, 2288 const CallEvent *Call, 2289 RegionAndSymbolInvalidationTraits &ITraits) { 2290 2291 if (!Invalidated || Invalidated->empty()) 2292 return State; 2293 2294 if (!Call) 2295 return getCheckerManager().runCheckersForPointerEscape(State, 2296 *Invalidated, 2297 nullptr, 2298 PSK_EscapeOther, 2299 &ITraits); 2300 2301 // If the symbols were invalidated by a call, we want to find out which ones 2302 // were invalidated directly due to being arguments to the call. 2303 InvalidatedSymbols SymbolsDirectlyInvalidated; 2304 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2305 E = ExplicitRegions.end(); I != E; ++I) { 2306 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2307 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2308 } 2309 2310 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2311 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2312 E = Invalidated->end(); I!=E; ++I) { 2313 SymbolRef sym = *I; 2314 if (SymbolsDirectlyInvalidated.count(sym)) 2315 continue; 2316 SymbolsIndirectlyInvalidated.insert(sym); 2317 } 2318 2319 if (!SymbolsDirectlyInvalidated.empty()) 2320 State = getCheckerManager().runCheckersForPointerEscape(State, 2321 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2322 2323 // Notify about the symbols that get indirectly invalidated by the call. 2324 if (!SymbolsIndirectlyInvalidated.empty()) 2325 State = getCheckerManager().runCheckersForPointerEscape(State, 2326 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2327 2328 return State; 2329 } 2330 2331 /// evalBind - Handle the semantics of binding a value to a specific location. 2332 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 
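/// For example, for the store
///
///   *p = 42;
///
/// evalStore() first evaluates the location of '*p' (checking for bad
/// dereferences) and then calls this method, which runs the checkers' Bind
/// callbacks and binds the value 42 to that location at a PostStore program
/// point.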
2333 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2334 ExplodedNode *Pred, 2335 SVal location, SVal Val, 2336 bool atDeclInit, const ProgramPoint *PP) { 2337 2338 const LocationContext *LC = Pred->getLocationContext(); 2339 PostStmt PS(StoreE, LC); 2340 if (!PP) 2341 PP = &PS; 2342 2343 // Do a previsit of the bind. 2344 ExplodedNodeSet CheckedSet; 2345 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2346 StoreE, *this, *PP); 2347 2348 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2349 2350 // If the location is not a 'Loc', it will already be handled by 2351 // the checkers. There is nothing left to do. 2352 if (!location.getAs<Loc>()) { 2353 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2354 /*tag*/nullptr); 2355 ProgramStateRef state = Pred->getState(); 2356 state = processPointerEscapedOnBind(state, location, Val, LC); 2357 Bldr.generateNode(L, state, Pred); 2358 return; 2359 } 2360 2361 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2362 I!=E; ++I) { 2363 ExplodedNode *PredI = *I; 2364 ProgramStateRef state = PredI->getState(); 2365 2366 state = processPointerEscapedOnBind(state, location, Val, LC); 2367 2368 // When binding the value, pass on the hint that this is a initialization. 2369 // For initializations, we do not need to inform clients of region 2370 // changes. 2371 state = state->bindLoc(location.castAs<Loc>(), 2372 Val, LC, /* notifyChanges = */ !atDeclInit); 2373 2374 const MemRegion *LocReg = nullptr; 2375 if (Optional<loc::MemRegionVal> LocRegVal = 2376 location.getAs<loc::MemRegionVal>()) { 2377 LocReg = LocRegVal->getRegion(); 2378 } 2379 2380 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2381 Bldr.generateNode(L, state, PredI); 2382 } 2383 } 2384 2385 /// evalStore - Handle the semantics of a store via an assignment. 2386 /// @param Dst The node set to store generated state nodes 2387 /// @param AssignE The assignment expression if the store happens in an 2388 /// assignment. 2389 /// @param LocationE The location expression that is stored to. 2390 /// @param state The current simulation state 2391 /// @param location The location to store the value 2392 /// @param Val The value to be stored 2393 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2394 const Expr *LocationE, 2395 ExplodedNode *Pred, 2396 ProgramStateRef state, SVal location, SVal Val, 2397 const ProgramPointTag *tag) { 2398 // Proceed with the store. We use AssignE as the anchor for the PostStore 2399 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2400 const Expr *StoreE = AssignE ? AssignE : LocationE; 2401 2402 // Evaluate the location (checks for bad dereferences). 2403 ExplodedNodeSet Tmp; 2404 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2405 2406 if (Tmp.empty()) 2407 return; 2408 2409 if (location.isUndef()) 2410 return; 2411 2412 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2413 evalBind(Dst, StoreE, *NI, location, Val, false); 2414 } 2415 2416 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2417 const Expr *NodeEx, 2418 const Expr *BoundEx, 2419 ExplodedNode *Pred, 2420 ProgramStateRef state, 2421 SVal location, 2422 const ProgramPointTag *tag, 2423 QualType LoadTy) 2424 { 2425 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2426 2427 // Are we loading from a region? 
This actually results in two loads; one 2428 // to fetch the address of the referenced value and one to fetch the 2429 // referenced value. 2430 if (const TypedValueRegion *TR = 2431 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2432 2433 QualType ValTy = TR->getValueType(); 2434 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2435 static SimpleProgramPointTag 2436 loadReferenceTag(TagProviderName, "Load Reference"); 2437 ExplodedNodeSet Tmp; 2438 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2439 location, &loadReferenceTag, 2440 getContext().getPointerType(RT->getPointeeType())); 2441 2442 // Perform the load from the referenced value. 2443 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2444 state = (*I)->getState(); 2445 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2446 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2447 } 2448 return; 2449 } 2450 } 2451 2452 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2453 } 2454 2455 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2456 const Expr *NodeEx, 2457 const Expr *BoundEx, 2458 ExplodedNode *Pred, 2459 ProgramStateRef state, 2460 SVal location, 2461 const ProgramPointTag *tag, 2462 QualType LoadTy) { 2463 assert(NodeEx); 2464 assert(BoundEx); 2465 // Evaluate the location (checks for bad dereferences). 2466 ExplodedNodeSet Tmp; 2467 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2468 if (Tmp.empty()) 2469 return; 2470 2471 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2472 if (location.isUndef()) 2473 return; 2474 2475 // Proceed with the load. 2476 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2477 state = (*NI)->getState(); 2478 const LocationContext *LCtx = (*NI)->getLocationContext(); 2479 2480 SVal V = UnknownVal(); 2481 if (location.isValid()) { 2482 if (LoadTy.isNull()) 2483 LoadTy = BoundEx->getType(); 2484 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2485 } 2486 2487 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2488 ProgramPoint::PostLoadKind); 2489 } 2490 } 2491 2492 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2493 const Stmt *NodeEx, 2494 const Stmt *BoundEx, 2495 ExplodedNode *Pred, 2496 ProgramStateRef state, 2497 SVal location, 2498 const ProgramPointTag *tag, 2499 bool isLoad) { 2500 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2501 // Early checks for performance reason. 2502 if (location.isUnknown()) { 2503 return; 2504 } 2505 2506 ExplodedNodeSet Src; 2507 BldrTop.takeNodes(Pred); 2508 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2509 if (Pred->getState() != state) { 2510 // Associate this new state with an ExplodedNode. 
2511 // FIXME: If I pass null tag, the graph is incorrect, e.g. for
2512 // int *p;
2513 // p = 0;
2514 // *p = 0xDEADBEEF;
2515 // "p = 0" is not noted as "Null pointer value stored to 'p'" but
2516 // instead "int *p" is noted as
2517 // "Variable 'p' initialized to a null pointer value"
2518 
2519 static SimpleProgramPointTag tag(TagProviderName, "Location");
2520 Bldr.generateNode(NodeEx, Pred, state, &tag);
2521 }
2522 ExplodedNodeSet Tmp;
2523 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
2524 NodeEx, BoundEx, *this);
2525 BldrTop.addNodes(Tmp);
2526 }
2527 
2528 std::pair<const ProgramPointTag *, const ProgramPointTag*>
2529 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
2530 static SimpleProgramPointTag
2531 eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
2532 "Eagerly Assume True"),
2533 eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
2534 "Eagerly Assume False");
2535 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
2536 &eagerlyAssumeBinOpBifurcationFalse);
2537 }
2538 
2539 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
2540 ExplodedNodeSet &Src,
2541 const Expr *Ex) {
2542 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);
2543 
2544 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) {
2545 ExplodedNode *Pred = *I;
2546 // Test if the previous node was at the same expression. This can happen
2547 // when the expression fails to evaluate to anything meaningful and
2548 // (as an optimization) we don't generate a node.
2549 ProgramPoint P = Pred->getLocation();
2550 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
2551 continue;
2552 }
2553 
2554 ProgramStateRef state = Pred->getState();
2555 SVal V = state->getSVal(Ex, Pred->getLocationContext());
2556 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
2557 if (SEV && SEV->isExpression()) {
2558 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
2559 geteagerlyAssumeBinOpBifurcationTags();
2560 
2561 ProgramStateRef StateTrue, StateFalse;
2562 std::tie(StateTrue, StateFalse) = state->assume(*SEV);
2563 
2564 // First assume that the condition is true.
2565 if (StateTrue) {
2566 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
2567 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
2568 Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
2569 }
2570 
2571 // Next, assume that the condition is false.
2572 if (StateFalse) {
2573 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
2574 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
2575 Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
2576 }
2577 }
2578 }
2579 }
2580 
2581 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
2582 ExplodedNodeSet &Dst) {
2583 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
2584 // We have processed both the inputs and the outputs. All of the outputs
2585 // should evaluate to Locs. Nuke all of their values.
2586 
2587 // FIXME: Some day in the future it would be nice to allow a "plug-in"
2588 // which interprets the inline asm and stores proper results in the
2589 // outputs.
2590 
2591 ProgramStateRef state = Pred->getState();
2592 
2593 for (const Expr *O : A->outputs()) {
2594 SVal X = state->getSVal(O, Pred->getLocationContext());
2595 assert(!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef.
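// For example, given
//
//   int x;
//   __asm__("" : "=r"(x));
//
// the output operand 'x' evaluates to its lvalue here, and its current
// binding is clobbered with UnknownVal below.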
2596 2597 if (Optional<Loc> LV = X.getAs<Loc>()) 2598 state = state->bindLoc(*LV, UnknownVal(), Pred->getLocationContext()); 2599 } 2600 2601 Bldr.generateNode(A, Pred, state); 2602 } 2603 2604 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2605 ExplodedNodeSet &Dst) { 2606 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2607 Bldr.generateNode(A, Pred, Pred->getState()); 2608 } 2609 2610 //===----------------------------------------------------------------------===// 2611 // Visualization. 2612 //===----------------------------------------------------------------------===// 2613 2614 #ifndef NDEBUG 2615 static ExprEngine* GraphPrintCheckerState; 2616 static SourceManager* GraphPrintSourceManager; 2617 2618 namespace llvm { 2619 template<> 2620 struct DOTGraphTraits<ExplodedNode*> : 2621 public DefaultDOTGraphTraits { 2622 2623 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2624 2625 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2626 // work. 2627 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2628 return ""; 2629 } 2630 2631 // De-duplicate some source location pretty-printing. 2632 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2633 if (SLoc.isFileID()) { 2634 Out << "\\lline=" 2635 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2636 << " col=" 2637 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2638 << "\\l"; 2639 } 2640 } 2641 static void printLocation2(raw_ostream &Out, SourceLocation SLoc) { 2642 if (SLoc.isFileID() && GraphPrintSourceManager->isInMainFile(SLoc)) 2643 Out << "line " << GraphPrintSourceManager->getExpansionLineNumber(SLoc); 2644 else 2645 SLoc.print(Out, *GraphPrintSourceManager); 2646 } 2647 2648 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2649 2650 std::string sbuf; 2651 llvm::raw_string_ostream Out(sbuf); 2652 2653 // Program Location. 2654 ProgramPoint Loc = N->getLocation(); 2655 2656 switch (Loc.getKind()) { 2657 case ProgramPoint::BlockEntranceKind: { 2658 Out << "Block Entrance: B" 2659 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2660 break; 2661 } 2662 2663 case ProgramPoint::BlockExitKind: 2664 assert (false); 2665 break; 2666 2667 case ProgramPoint::CallEnterKind: 2668 Out << "CallEnter"; 2669 break; 2670 2671 case ProgramPoint::CallExitBeginKind: 2672 Out << "CallExitBegin"; 2673 break; 2674 2675 case ProgramPoint::CallExitEndKind: 2676 Out << "CallExitEnd"; 2677 break; 2678 2679 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2680 Out << "PostStmtPurgeDeadSymbols"; 2681 break; 2682 2683 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2684 Out << "PreStmtPurgeDeadSymbols"; 2685 break; 2686 2687 case ProgramPoint::EpsilonKind: 2688 Out << "Epsilon Point"; 2689 break; 2690 2691 case ProgramPoint::PreImplicitCallKind: { 2692 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2693 Out << "PreCall: "; 2694 2695 // FIXME: Get proper printing options. 2696 PC.getDecl()->print(Out, LangOptions()); 2697 printLocation(Out, PC.getLocation()); 2698 break; 2699 } 2700 2701 case ProgramPoint::PostImplicitCallKind: { 2702 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2703 Out << "PostCall: "; 2704 2705 // FIXME: Get proper printing options. 
2706 PC.getDecl()->print(Out, LangOptions()); 2707 printLocation(Out, PC.getLocation()); 2708 break; 2709 } 2710 2711 case ProgramPoint::PostInitializerKind: { 2712 Out << "PostInitializer: "; 2713 const CXXCtorInitializer *Init = 2714 Loc.castAs<PostInitializer>().getInitializer(); 2715 if (const FieldDecl *FD = Init->getAnyMember()) 2716 Out << *FD; 2717 else { 2718 QualType Ty = Init->getTypeSourceInfo()->getType(); 2719 Ty = Ty.getLocalUnqualifiedType(); 2720 LangOptions LO; // FIXME. 2721 Ty.print(Out, LO); 2722 } 2723 break; 2724 } 2725 2726 case ProgramPoint::BlockEdgeKind: { 2727 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2728 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2729 << E.getDst()->getBlockID() << ')'; 2730 2731 if (const Stmt *T = E.getSrc()->getTerminator()) { 2732 SourceLocation SLoc = T->getLocStart(); 2733 2734 Out << "\\|Terminator: "; 2735 LangOptions LO; // FIXME. 2736 E.getSrc()->printTerminator(Out, LO); 2737 2738 if (SLoc.isFileID()) { 2739 Out << "\\lline=" 2740 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2741 << " col=" 2742 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2743 } 2744 2745 if (isa<SwitchStmt>(T)) { 2746 const Stmt *Label = E.getDst()->getLabel(); 2747 2748 if (Label) { 2749 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2750 Out << "\\lcase "; 2751 LangOptions LO; // FIXME. 2752 if (C->getLHS()) 2753 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2754 2755 if (const Stmt *RHS = C->getRHS()) { 2756 Out << " .. "; 2757 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2758 } 2759 2760 Out << ":"; 2761 } 2762 else { 2763 assert (isa<DefaultStmt>(Label)); 2764 Out << "\\ldefault:"; 2765 } 2766 } 2767 else 2768 Out << "\\l(implicit) default:"; 2769 } 2770 else if (isa<IndirectGotoStmt>(T)) { 2771 // FIXME 2772 } 2773 else { 2774 Out << "\\lCondition: "; 2775 if (*E.getSrc()->succ_begin() == E.getDst()) 2776 Out << "true"; 2777 else 2778 Out << "false"; 2779 } 2780 2781 Out << "\\l"; 2782 } 2783 2784 break; 2785 } 2786 2787 default: { 2788 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2789 assert(S != nullptr && "Expecting non-null Stmt"); 2790 2791 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2792 LangOptions LO; // FIXME. 2793 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2794 printLocation(Out, S->getLocStart()); 2795 2796 if (Loc.getAs<PreStmt>()) 2797 Out << "\\lPreStmt\\l;"; 2798 else if (Loc.getAs<PostLoad>()) 2799 Out << "\\lPostLoad\\l;"; 2800 else if (Loc.getAs<PostStore>()) 2801 Out << "\\lPostStore\\l"; 2802 else if (Loc.getAs<PostLValue>()) 2803 Out << "\\lPostLValue\\l"; 2804 2805 break; 2806 } 2807 } 2808 2809 ProgramStateRef state = N->getState(); 2810 Out << "\\|StateID: " << (const void*) state.get() 2811 << " NodeID: " << (const void*) N << "\\|"; 2812 2813 // Analysis stack backtrace. 2814 Out << "Location context stack (from current to outer):\\l"; 2815 const LocationContext *LC = Loc.getLocationContext(); 2816 unsigned Idx = 0; 2817 for (; LC; LC = LC->getParent(), ++Idx) { 2818 Out << Idx << ". 
(" << (const void *)LC << ") "; 2819 switch (LC->getKind()) { 2820 case LocationContext::StackFrame: 2821 if (const NamedDecl *D = dyn_cast<NamedDecl>(LC->getDecl())) 2822 Out << "Calling " << D->getQualifiedNameAsString(); 2823 else 2824 Out << "Calling anonymous code"; 2825 if (const Stmt *S = cast<StackFrameContext>(LC)->getCallSite()) { 2826 Out << " at "; 2827 printLocation2(Out, S->getLocStart()); 2828 } 2829 break; 2830 case LocationContext::Block: 2831 Out << "Invoking block"; 2832 if (const Decl *D = cast<BlockInvocationContext>(LC)->getBlockDecl()) { 2833 Out << " defined at "; 2834 printLocation2(Out, D->getLocStart()); 2835 } 2836 break; 2837 case LocationContext::Scope: 2838 Out << "Entering scope"; 2839 // FIXME: Add more info once ScopeContext is activated. 2840 break; 2841 } 2842 Out << "\\l"; 2843 } 2844 Out << "\\l"; 2845 2846 state->printDOT(Out); 2847 2848 Out << "\\l"; 2849 2850 if (const ProgramPointTag *tag = Loc.getTag()) { 2851 Out << "\\|Tag: " << tag->getTagDescription(); 2852 Out << "\\l"; 2853 } 2854 return Out.str(); 2855 } 2856 }; 2857 } // end llvm namespace 2858 #endif 2859 2860 void ExprEngine::ViewGraph(bool trim) { 2861 #ifndef NDEBUG 2862 if (trim) { 2863 std::vector<const ExplodedNode*> Src; 2864 2865 // Flush any outstanding reports to make sure we cover all the nodes. 2866 // This does not cause them to get displayed. 2867 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2868 const_cast<BugType*>(*I)->FlushReports(BR); 2869 2870 // Iterate through the reports and get their nodes. 2871 for (BugReporter::EQClasses_iterator 2872 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2873 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2874 if (N) Src.push_back(N); 2875 } 2876 2877 ViewGraph(Src); 2878 } 2879 else { 2880 GraphPrintCheckerState = this; 2881 GraphPrintSourceManager = &getContext().getSourceManager(); 2882 2883 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2884 2885 GraphPrintCheckerState = nullptr; 2886 GraphPrintSourceManager = nullptr; 2887 } 2888 #endif 2889 } 2890 2891 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2892 #ifndef NDEBUG 2893 GraphPrintCheckerState = this; 2894 GraphPrintSourceManager = &getContext().getSourceManager(); 2895 2896 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2897 2898 if (!TrimmedG.get()) 2899 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2900 else 2901 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2902 2903 GraphPrintCheckerState = nullptr; 2904 GraphPrintSourceManager = nullptr; 2905 #endif 2906 } 2907