//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/ImmutableList.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
            "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
            "The # of aborted paths due to reaching the maximum block count in "
            "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
            "The # of aborted paths due to reaching the maximum block count in "
            "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
            "The # of times we re-evaluated a call without inlining");

typedef std::pair<const CXXBindTemporaryExpr *, const StackFrameContext *>
    CXXBindTemporaryContext;

// Keeps track of whether CXXBindTemporaryExpr nodes have been evaluated.
// The StackFrameContext assures that nested calls due to inlined recursive
// functions do not interfere.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedTemporariesSet,
                                 llvm::ImmutableSet<CXXBindTemporaryContext>)

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer
  // region, start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression?  If so,
  // postpone cleaning out the state.
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they could query the
    // values of the soon to be dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
          I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
        "Checkers are not allowed to modify the Environment as a part of "
        "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
        "Checkers are not allowed to modify the Store as a part of "
        "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
        StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    const Expr *Init = BMI->getInit()->IgnoreImplicit();
    if (!isa<CXXConstructExpr>(Init)) {
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {
  ExplodedNodeSet CleanDtorState;
  StmtNodeBuilder StmtBldr(Pred, CleanDtorState, *currBldrCtx);
  ProgramStateRef State = Pred->getState();
  if (State->contains<InitializedTemporariesSet>(
          std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()))) {
    // FIXME: Currently we insert temporary destructors for default parameters,
    // but we don't insert the constructors.
    State = State->remove<InitializedTemporariesSet>(
        std::make_pair(D.getBindTemporaryExpr(), Pred->getStackFrame()));
  }
  StmtBldr.generateNode(D.getBindTemporaryExpr(), Pred, State);

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();
  // FIXME: Currently CleanDtorState can be empty here due to temporaries being
  // bound to default parameters.
  assert(CleanDtorState.size() <= 1);
  ExplodedNode *CleanPred =
      CleanDtorState.empty() ? Pred : *CleanDtorState.begin();
  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/false, CleanPred, Dst);
}

void ExprEngine::processCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                               NodeBuilderContext &BldCtx,
                                               ExplodedNode *Pred,
                                               ExplodedNodeSet &Dst,
                                               const CFGBlock *DstT,
                                               const CFGBlock *DstF) {
  BranchNodeBuilder TempDtorBuilder(Pred, Dst, BldCtx, DstT, DstF);
  if (Pred->getState()->contains<InitializedTemporariesSet>(
          std::make_pair(BTE, Pred->getStackFrame()))) {
    TempDtorBuilder.markInfeasible(false);
    TempDtorBuilder.generateNode(Pred->getState(), true, Pred);
  } else {
    TempDtorBuilder.markInfeasible(true);
    TempDtorBuilder.generateNode(Pred->getState(), false, Pred);
  }
}

void ExprEngine::VisitCXXBindTemporaryExpr(const CXXBindTemporaryExpr *BTE,
                                           ExplodedNodeSet &PreVisit,
                                           ExplodedNodeSet &Dst) {
  if (!getAnalysisManager().options.includeTemporaryDtorsInCFG()) {
    // In case we don't have temporary destructors in the CFG, do not mark
    // the initialization - we would otherwise never clean it up.
    Dst = PreVisit;
    return;
  }
  StmtNodeBuilder StmtBldr(PreVisit, Dst, *currBldrCtx);
  for (ExplodedNode *Node : PreVisit) {
    ProgramStateRef State = Node->getState();

    if (!State->contains<InitializedTemporariesSet>(
            std::make_pair(BTE, Node->getStackFrame()))) {
      // FIXME: Currently the state might already contain the marker due to
      // incorrect handling of temporaries bound to default parameters; for
      // those, we currently skip the CXXBindTemporaryExpr but rely on adding
      // temporary destructor nodes.
      State = State->add<InitializedTemporariesSet>(
          std::make_pair(BTE, Node->getStackFrame()));
    }
    StmtBldr.generateNode(BTE, Node, State);
  }
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::CXXFoldExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::TypoExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::CoroutineBodyStmtClass:
    case Stmt::CoawaitExprClass:
    case Stmt::CoreturnStmtClass:
    case Stmt::CoyieldExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPForSimdDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPMasterDirectiveClass:
    case Stmt::OMPCriticalDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelForSimdDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
    case Stmt::OMPTaskyieldDirectiveClass:
    case Stmt::OMPBarrierDirectiveClass:
    case Stmt::OMPTaskwaitDirectiveClass:
    case Stmt::OMPTaskgroupDirectiveClass:
    case Stmt::OMPFlushDirectiveClass:
    case Stmt::OMPOrderedDirectiveClass:
    case Stmt::OMPAtomicDirectiveClass:
    case Stmt::OMPTargetDirectiveClass:
    case Stmt::OMPTargetDataDirectiveClass:
    case Stmt::OMPTeamsDirectiveClass:
    case Stmt::OMPCancellationPointDirectiveClass:
    case Stmt::OMPCancelDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    case Stmt::CXXBindTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);
      ExplodedNodeSet Next;
      VisitCXXBindTemporaryExpr(cast<CXXBindTemporaryExpr>(S), PreVisit, Next);
      getCheckerManager().runCheckersForPostStmt(Dst, Next, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::DesignatedInitUpdateExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
    case Stmt::AtomicExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::PredefinedExprClass:
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::NoInitExprClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass:
    case Stmt::OMPArraySectionExprClass:
    case Stmt::TypeTraitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
            dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::LambdaExprClass:
      if (AMgr.options.shouldInlineLambdas()) {
        Bldr.takeNodes(Pred);
        VisitLambdaExpr(cast<LambdaExpr>(S), Pred, Dst);
        Bldr.addNodes(Dst);
      } else {
        const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
        Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      }
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                  Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExprs as explicit control flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete.  We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                  Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF =
      CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
    NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance.  (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // FIXME: Refactor this into a checker.
  if (nodeBuilder.getContext().blockCount() >=
      AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast.  Is it a symbolic value?
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  assert(!B->getTerminator().isTemporaryDtorsBranch() &&
         "Temporary destructor branches handled by processBindTemporary.");

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.

  // The invariants are still shifting, but it is possible that the
  // last element in a CFGBlock is not a CFGStmt.  Look for the last
  // CFGStmt as the value of the condition.
  CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  for (; I != E; ++I) {
    CFGElement Elem = *I;
    Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
    if (!CS)
      continue;
    const Stmt *LastStmt = CS->getStmt();
    assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
    return LastStmt;
  }
  llvm_unreachable("could not resolve condition");
}

void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  assert((!Condition || !isa<CXXBindTemporaryExpr>(Condition)) &&
         "CXXBindTemporaryExprs are handled by processBindTemporary.");
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)"
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }


  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity.  Right now casts of symbolic
          // integers that promote their values are currently not tracked well.
          // If 'Condition' is such an expression, try and recover the
          // underlying value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
    if (builder.isFeasible(true)) {
      if (StTrue)
        builder.generateNode(StTrue, true, PredI);
      else
        builder.markInfeasible(true);
    }

    // Process the false branch.
1624 if (builder.isFeasible(false)) { 1625 if (StFalse) 1626 builder.generateNode(StFalse, false, PredI); 1627 else 1628 builder.markInfeasible(false); 1629 } 1630 } 1631 currBldrCtx = nullptr; 1632 } 1633 1634 /// The GDM component containing the set of global variables which have been 1635 /// previously initialized with explicit initializers. 1636 REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet, 1637 llvm::ImmutableSet<const VarDecl *>) 1638 1639 void ExprEngine::processStaticInitializer(const DeclStmt *DS, 1640 NodeBuilderContext &BuilderCtx, 1641 ExplodedNode *Pred, 1642 clang::ento::ExplodedNodeSet &Dst, 1643 const CFGBlock *DstT, 1644 const CFGBlock *DstF) { 1645 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1646 currBldrCtx = &BuilderCtx; 1647 1648 const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl()); 1649 ProgramStateRef state = Pred->getState(); 1650 bool initHasRun = state->contains<InitializedGlobalsSet>(VD); 1651 BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF); 1652 1653 if (!initHasRun) { 1654 state = state->add<InitializedGlobalsSet>(VD); 1655 } 1656 1657 builder.generateNode(state, initHasRun, Pred); 1658 builder.markInfeasible(!initHasRun); 1659 1660 currBldrCtx = nullptr; 1661 } 1662 1663 /// processIndirectGoto - Called by CoreEngine. Used to generate successor 1664 /// nodes by processing the 'effects' of a computed goto jump. 1665 void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) { 1666 1667 ProgramStateRef state = builder.getState(); 1668 SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext()); 1669 1670 // Three possibilities: 1671 // 1672 // (1) We know the computed label. 1673 // (2) The label is NULL (or some other constant), or Undefined. 1674 // (3) We have no clue about the label. Dispatch to all targets. 1675 // 1676 1677 typedef IndirectGotoNodeBuilder::iterator iterator; 1678 1679 if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) { 1680 const LabelDecl *L = LV->getLabel(); 1681 1682 for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) { 1683 if (I.getLabel() == L) { 1684 builder.generateNode(I, state); 1685 return; 1686 } 1687 } 1688 1689 llvm_unreachable("No block with label."); 1690 } 1691 1692 if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) { 1693 // Dispatch to the first target and mark it as a sink. 1694 //ExplodedNode* N = builder.generateNode(builder.begin(), state, true); 1695 // FIXME: add checker visit. 1696 // UndefBranches.insert(N); 1697 return; 1698 } 1699 1700 // This is really a catch-all. We don't support symbolics yet. 1701 // FIXME: Implement dispatch for symbolic pointers. 1702 1703 for (iterator I=builder.begin(), E=builder.end(); I != E; ++I) 1704 builder.generateNode(I, state); 1705 } 1706 1707 #if 0 1708 static bool stackFrameDoesNotContainInitializedTemporaries(ExplodedNode &Pred) { 1709 const StackFrameContext* Frame = Pred.getStackFrame(); 1710 const llvm::ImmutableSet<CXXBindTemporaryContext> &Set = 1711 Pred.getState()->get<InitializedTemporariesSet>(); 1712 return std::find_if(Set.begin(), Set.end(), 1713 [&](const CXXBindTemporaryContext &Ctx) { 1714 if (Ctx.second == Frame) { 1715 Ctx.first->dump(); 1716 llvm::errs() << "\n"; 1717 } 1718 return Ctx.second == Frame; 1719 }) == Set.end(); 1720 } 1721 #endif 1722 1723 /// ProcessEndPath - Called by CoreEngine. Used to generate end-of-path 1724 /// nodes when the control reaches the end of a function. 
1725 void ExprEngine::processEndOfFunction(NodeBuilderContext& BC, 1726 ExplodedNode *Pred) { 1727 // FIXME: Assert that stackFrameDoesNotContainInitializedTemporaries(*Pred)). 1728 // We currently cannot enable this assert, as lifetime extended temporaries 1729 // are not modelled correctly. 1730 PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext()); 1731 StateMgr.EndPath(Pred->getState()); 1732 1733 ExplodedNodeSet Dst; 1734 if (Pred->getLocationContext()->inTopFrame()) { 1735 // Remove dead symbols. 1736 ExplodedNodeSet AfterRemovedDead; 1737 removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead); 1738 1739 // Notify checkers. 1740 for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(), 1741 E = AfterRemovedDead.end(); I != E; ++I) { 1742 getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this); 1743 } 1744 } else { 1745 getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this); 1746 } 1747 1748 Engine.enqueueEndOfFunction(Dst); 1749 } 1750 1751 /// ProcessSwitch - Called by CoreEngine. Used to generate successor 1752 /// nodes by processing the 'effects' of a switch statement. 1753 void ExprEngine::processSwitch(SwitchNodeBuilder& builder) { 1754 typedef SwitchNodeBuilder::iterator iterator; 1755 ProgramStateRef state = builder.getState(); 1756 const Expr *CondE = builder.getCondition(); 1757 SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext()); 1758 1759 if (CondV_untested.isUndef()) { 1760 //ExplodedNode* N = builder.generateDefaultCaseNode(state, true); 1761 // FIXME: add checker 1762 //UndefBranches.insert(N); 1763 1764 return; 1765 } 1766 DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>(); 1767 1768 ProgramStateRef DefaultSt = state; 1769 1770 iterator I = builder.begin(), EI = builder.end(); 1771 bool defaultIsFeasible = I == EI; 1772 1773 for ( ; I != EI; ++I) { 1774 // Successor may be pruned out during CFG construction. 1775 if (!I.getBlock()) 1776 continue; 1777 1778 const CaseStmt *Case = I.getCase(); 1779 1780 // Evaluate the LHS of the case value. 1781 llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext()); 1782 assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType())); 1783 1784 // Get the RHS of the case, if it exists. 1785 llvm::APSInt V2; 1786 if (const Expr *E = Case->getRHS()) 1787 V2 = E->EvaluateKnownConstInt(getContext()); 1788 else 1789 V2 = V1; 1790 1791 ProgramStateRef StateCase; 1792 if (Optional<NonLoc> NL = CondV.getAs<NonLoc>()) 1793 std::tie(StateCase, DefaultSt) = 1794 DefaultSt->assumeWithinInclusiveRange(*NL, V1, V2); 1795 else // UnknownVal 1796 StateCase = DefaultSt; 1797 1798 if (StateCase) 1799 builder.generateCaseStmtNode(I, StateCase); 1800 1801 // Now "assume" that the case doesn't match. Add this state 1802 // to the default state (if it is feasible). 1803 if (DefaultSt) 1804 defaultIsFeasible = true; 1805 else { 1806 defaultIsFeasible = false; 1807 break; 1808 } 1809 } 1810 1811 if (!defaultIsFeasible) 1812 return; 1813 1814 // If we have switch(enum value), the default branch is not 1815 // feasible if all of the enum constants not covered by 'case:' statements 1816 // are not feasible values for the switch condition. 1817 // 1818 // Note that this isn't as accurate as it could be. Even if there isn't 1819 // a case for a particular enum value as long as that enum value isn't 1820 // feasible then it shouldn't be considered for making 'default:' reachable. 
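// For example, with
//
//   enum Color { Red, Green };
//   switch (c) { case Red: ...; case Green: ...; }
//
// every enumerator is covered by a 'case', so when the condition has enum
// type and isAllEnumCasesCovered() holds we return without generating a
// 'default' node below.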
1821 const SwitchStmt *SS = builder.getSwitch(); 1822 const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts(); 1823 if (CondExpr->getType()->getAs<EnumType>()) { 1824 if (SS->isAllEnumCasesCovered()) 1825 return; 1826 } 1827 1828 builder.generateDefaultCaseNode(DefaultSt); 1829 } 1830 1831 //===----------------------------------------------------------------------===// 1832 // Transfer functions: Loads and stores. 1833 //===----------------------------------------------------------------------===// 1834 1835 void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D, 1836 ExplodedNode *Pred, 1837 ExplodedNodeSet &Dst) { 1838 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 1839 1840 ProgramStateRef state = Pred->getState(); 1841 const LocationContext *LCtx = Pred->getLocationContext(); 1842 1843 if (const VarDecl *VD = dyn_cast<VarDecl>(D)) { 1844 // C permits "extern void v", and if you cast the address to a valid type, 1845 // you can even do things with it. We simply pretend 1846 assert(Ex->isGLValue() || VD->getType()->isVoidType()); 1847 const LocationContext *LocCtxt = Pred->getLocationContext(); 1848 const Decl *D = LocCtxt->getDecl(); 1849 const auto *MD = D ? dyn_cast<CXXMethodDecl>(D) : nullptr; 1850 const auto *DeclRefEx = dyn_cast<DeclRefExpr>(Ex); 1851 SVal V; 1852 bool CaptureByReference = false; 1853 if (AMgr.options.shouldInlineLambdas() && DeclRefEx && 1854 DeclRefEx->refersToEnclosingVariableOrCapture() && MD && 1855 MD->getParent()->isLambda()) { 1856 // Lookup the field of the lambda. 1857 const CXXRecordDecl *CXXRec = MD->getParent(); 1858 llvm::DenseMap<const VarDecl *, FieldDecl *> LambdaCaptureFields; 1859 FieldDecl *LambdaThisCaptureField; 1860 CXXRec->getCaptureFields(LambdaCaptureFields, LambdaThisCaptureField); 1861 const FieldDecl *FD = LambdaCaptureFields[VD]; 1862 if (!FD) { 1863 // When a constant is captured, sometimes no corresponding field is 1864 // created in the lambda object. 1865 assert(VD->getType().isConstQualified()); 1866 V = state->getLValue(VD, LocCtxt); 1867 } else { 1868 Loc CXXThis = 1869 svalBuilder.getCXXThis(MD, LocCtxt->getCurrentStackFrame()); 1870 SVal CXXThisVal = state->getSVal(CXXThis); 1871 V = state->getLValue(FD, CXXThisVal); 1872 if (FD->getType()->isReferenceType() && 1873 !VD->getType()->isReferenceType()) 1874 CaptureByReference = true; 1875 } 1876 } else { 1877 V = state->getLValue(VD, LocCtxt); 1878 } 1879 1880 // For references, the 'lvalue' is the pointer address stored in the 1881 // reference region. 1882 if (VD->getType()->isReferenceType() || CaptureByReference) { 1883 if (const MemRegion *R = V.getAsRegion()) 1884 V = state->getSVal(R); 1885 else 1886 V = UnknownVal(); 1887 } 1888 1889 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1890 ProgramPoint::PostLValueKind); 1891 return; 1892 } 1893 if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) { 1894 assert(!Ex->isGLValue()); 1895 SVal V = svalBuilder.makeIntVal(ED->getInitVal()); 1896 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V)); 1897 return; 1898 } 1899 if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) { 1900 SVal V = svalBuilder.getFunctionPointer(FD); 1901 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1902 ProgramPoint::PostLValueKind); 1903 return; 1904 } 1905 if (isa<FieldDecl>(D)) { 1906 // FIXME: Compute lvalue of field pointers-to-member. 1907 // Right now we just use a non-null void pointer, so that it gives proper 1908 // results in boolean contexts. 
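// Illustrative sketch (hypothetical snippet): a pointer-to-member
// expression such as
//
//   struct S { int x; };
//   ... if (&S::x) ...
//
// refers to the FieldDecl 'x'. The symbol conjured below is assumed to be
// non-null, so branching on such a value takes the 'true' path even though
// the member itself is not modelled.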
1909 SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy, 1910 currBldrCtx->blockCount()); 1911 state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true); 1912 Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr, 1913 ProgramPoint::PostLValueKind); 1914 return; 1915 } 1916 1917 llvm_unreachable("Support for this Decl not implemented."); 1918 } 1919 1920 /// VisitArraySubscriptExpr - Transfer function for array accesses 1921 void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A, 1922 ExplodedNode *Pred, 1923 ExplodedNodeSet &Dst){ 1924 1925 const Expr *Base = A->getBase()->IgnoreParens(); 1926 const Expr *Idx = A->getIdx()->IgnoreParens(); 1927 1928 1929 ExplodedNodeSet checkerPreStmt; 1930 getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this); 1931 1932 StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx); 1933 assert(A->isGLValue() || 1934 (!AMgr.getLangOpts().CPlusPlus && 1935 A->getType().isCForbiddenLValueType())); 1936 1937 for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(), 1938 ei = checkerPreStmt.end(); it != ei; ++it) { 1939 const LocationContext *LCtx = (*it)->getLocationContext(); 1940 ProgramStateRef state = (*it)->getState(); 1941 SVal V = state->getLValue(A->getType(), 1942 state->getSVal(Idx, LCtx), 1943 state->getSVal(Base, LCtx)); 1944 Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr, 1945 ProgramPoint::PostLValueKind); 1946 } 1947 } 1948 1949 /// VisitMemberExpr - Transfer function for member expressions. 1950 void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred, 1951 ExplodedNodeSet &Dst) { 1952 1953 // FIXME: Prechecks eventually go in ::Visit(). 1954 ExplodedNodeSet CheckedSet; 1955 getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this); 1956 1957 ExplodedNodeSet EvalSet; 1958 ValueDecl *Member = M->getMemberDecl(); 1959 1960 // Handle static member variables and enum constants accessed via 1961 // member syntax. 1962 if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) { 1963 ExplodedNodeSet Dst; 1964 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 1965 I != E; ++I) { 1966 VisitCommonDeclRefExpr(M, Member, Pred, EvalSet); 1967 } 1968 } else { 1969 StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); 1970 ExplodedNodeSet Tmp; 1971 1972 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 1973 I != E; ++I) { 1974 ProgramStateRef state = (*I)->getState(); 1975 const LocationContext *LCtx = (*I)->getLocationContext(); 1976 Expr *BaseExpr = M->getBase(); 1977 1978 // Handle C++ method calls. 1979 if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) { 1980 if (MD->isInstance()) 1981 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 1982 1983 SVal MDVal = svalBuilder.getFunctionPointer(MD); 1984 state = state->BindExpr(M, LCtx, MDVal); 1985 1986 Bldr.generateNode(M, *I, state); 1987 continue; 1988 } 1989 1990 // Handle regular struct fields / member variables. 1991 state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr); 1992 SVal baseExprVal = state->getSVal(BaseExpr, LCtx); 1993 1994 FieldDecl *field = cast<FieldDecl>(Member); 1995 SVal L = state->getLValue(field, baseExprVal); 1996 1997 if (M->isGLValue() || M->getType()->isArrayType()) { 1998 // We special-case rvalues of array type because the analyzer cannot 1999 // reason about them, since we expect all regions to be wrapped in Locs. 
2000 // We instead treat these as lvalues and assume that they will decay to 2001 // pointers as soon as they are used. 2002 if (!M->isGLValue()) { 2003 assert(M->getType()->isArrayType()); 2004 const ImplicitCastExpr *PE = 2005 dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M)); 2006 if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) { 2007 llvm_unreachable("should always be wrapped in ArrayToPointerDecay"); 2008 } 2009 } 2010 2011 if (field->getType()->isReferenceType()) { 2012 if (const MemRegion *R = L.getAsRegion()) 2013 L = state->getSVal(R); 2014 else 2015 L = UnknownVal(); 2016 } 2017 2018 Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr, 2019 ProgramPoint::PostLValueKind); 2020 } else { 2021 Bldr.takeNodes(*I); 2022 evalLoad(Tmp, M, M, *I, state, L); 2023 Bldr.addNodes(Tmp); 2024 } 2025 } 2026 } 2027 2028 getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this); 2029 } 2030 2031 namespace { 2032 class CollectReachableSymbolsCallback final : public SymbolVisitor { 2033 InvalidatedSymbols Symbols; 2034 public: 2035 CollectReachableSymbolsCallback(ProgramStateRef State) {} 2036 const InvalidatedSymbols &getSymbols() const { return Symbols; } 2037 2038 bool VisitSymbol(SymbolRef Sym) override { 2039 Symbols.insert(Sym); 2040 return true; 2041 } 2042 }; 2043 } // end anonymous namespace 2044 2045 // A value escapes in three possible cases: 2046 // (1) We are binding to something that is not a memory region. 2047 // (2) We are binding to a MemRegion that does not have stack storage. 2048 // (3) We are binding to a MemRegion with stack storage that the store 2049 // does not understand. 2050 ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State, 2051 SVal Loc, SVal Val) { 2052 // Are we storing to something that causes the value to "escape"? 2053 bool escapes = true; 2054 2055 // TODO: Move to StoreManager. 2056 if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) { 2057 escapes = !regionLoc->getRegion()->hasStackStorage(); 2058 2059 if (!escapes) { 2060 // To test (3), generate a new state with the binding added. If it is 2061 // the same state, then it escapes (since the store cannot represent 2062 // the binding). 2063 // Do this only if we know that the store is not supposed to generate the 2064 // same state. 2065 SVal StoredVal = State->getSVal(regionLoc->getRegion()); 2066 if (StoredVal != Val) 2067 escapes = (State == (State->bindLoc(*regionLoc, Val))); 2068 } 2069 } 2070 2071 // If our store can represent the binding and we aren't storing to something 2072 // that doesn't have local storage, then just return and have the simulation 2073 // state continue as is. 2074 if (!escapes) 2075 return State; 2076 2077 // Otherwise, find all symbols referenced by 'val' that we are tracking 2078 // and stop tracking them.
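// For example (hypothetical snippet): in
//
//   void *p = malloc(1);
//   global_ptr = p;   // 'global_ptr' has non-stack storage
//
// the bind of 'p' escapes, so every symbol reachable from the bound value
// is collected below and reported to the checkers with PSK_EscapeOnBind;
// checkers such as MallocChecker typically stop tracking escaped symbols.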
2079 CollectReachableSymbolsCallback Scanner = 2080 State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val); 2081 const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols(); 2082 State = getCheckerManager().runCheckersForPointerEscape(State, 2083 EscapedSymbols, 2084 /*CallEvent*/ nullptr, 2085 PSK_EscapeOnBind, 2086 nullptr); 2087 2088 return State; 2089 } 2090 2091 ProgramStateRef 2092 ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State, 2093 const InvalidatedSymbols *Invalidated, 2094 ArrayRef<const MemRegion *> ExplicitRegions, 2095 ArrayRef<const MemRegion *> Regions, 2096 const CallEvent *Call, 2097 RegionAndSymbolInvalidationTraits &ITraits) { 2098 2099 if (!Invalidated || Invalidated->empty()) 2100 return State; 2101 2102 if (!Call) 2103 return getCheckerManager().runCheckersForPointerEscape(State, 2104 *Invalidated, 2105 nullptr, 2106 PSK_EscapeOther, 2107 &ITraits); 2108 2109 // If the symbols were invalidated by a call, we want to find out which ones 2110 // were invalidated directly due to being arguments to the call. 2111 InvalidatedSymbols SymbolsDirectlyInvalidated; 2112 for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(), 2113 E = ExplicitRegions.end(); I != E; ++I) { 2114 if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>()) 2115 SymbolsDirectlyInvalidated.insert(R->getSymbol()); 2116 } 2117 2118 InvalidatedSymbols SymbolsIndirectlyInvalidated; 2119 for (InvalidatedSymbols::const_iterator I=Invalidated->begin(), 2120 E = Invalidated->end(); I!=E; ++I) { 2121 SymbolRef sym = *I; 2122 if (SymbolsDirectlyInvalidated.count(sym)) 2123 continue; 2124 SymbolsIndirectlyInvalidated.insert(sym); 2125 } 2126 2127 if (!SymbolsDirectlyInvalidated.empty()) 2128 State = getCheckerManager().runCheckersForPointerEscape(State, 2129 SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits); 2130 2131 // Notify about the symbols that get indirectly invalidated by the call. 2132 if (!SymbolsIndirectlyInvalidated.empty()) 2133 State = getCheckerManager().runCheckersForPointerEscape(State, 2134 SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits); 2135 2136 return State; 2137 } 2138 2139 /// evalBind - Handle the semantics of binding a value to a specific location. 2140 /// This method is used by evalStore and (soon) VisitDeclStmt, and others. 2141 void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE, 2142 ExplodedNode *Pred, 2143 SVal location, SVal Val, 2144 bool atDeclInit, const ProgramPoint *PP) { 2145 2146 const LocationContext *LC = Pred->getLocationContext(); 2147 PostStmt PS(StoreE, LC); 2148 if (!PP) 2149 PP = &PS; 2150 2151 // Do a previsit of the bind. 2152 ExplodedNodeSet CheckedSet; 2153 getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val, 2154 StoreE, *this, *PP); 2155 2156 2157 StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx); 2158 2159 // If the location is not a 'Loc', it will already be handled by 2160 // the checkers. There is nothing left to do. 
2161 if (!location.getAs<Loc>()) { 2162 const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr, 2163 /*tag*/nullptr); 2164 ProgramStateRef state = Pred->getState(); 2165 state = processPointerEscapedOnBind(state, location, Val); 2166 Bldr.generateNode(L, state, Pred); 2167 return; 2168 } 2169 2170 2171 for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); 2172 I!=E; ++I) { 2173 ExplodedNode *PredI = *I; 2174 ProgramStateRef state = PredI->getState(); 2175 2176 state = processPointerEscapedOnBind(state, location, Val); 2177 2178 // When binding the value, pass on the hint that this is an initialization. 2179 // For initializations, we do not need to inform clients of region 2180 // changes. 2181 state = state->bindLoc(location.castAs<Loc>(), 2182 Val, /* notifyChanges = */ !atDeclInit); 2183 2184 const MemRegion *LocReg = nullptr; 2185 if (Optional<loc::MemRegionVal> LocRegVal = 2186 location.getAs<loc::MemRegionVal>()) { 2187 LocReg = LocRegVal->getRegion(); 2188 } 2189 2190 const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr); 2191 Bldr.generateNode(L, state, PredI); 2192 } 2193 } 2194 2195 /// evalStore - Handle the semantics of a store via an assignment. 2196 /// @param Dst The node set to store generated state nodes 2197 /// @param AssignE The assignment expression if the store happens in an 2198 /// assignment. 2199 /// @param LocationE The location expression that is stored to. 2200 /// @param state The current simulation state 2201 /// @param location The location to store the value 2202 /// @param Val The value to be stored 2203 void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE, 2204 const Expr *LocationE, 2205 ExplodedNode *Pred, 2206 ProgramStateRef state, SVal location, SVal Val, 2207 const ProgramPointTag *tag) { 2208 // Proceed with the store. We use AssignE as the anchor for the PostStore 2209 // ProgramPoint if it is non-NULL, and LocationE otherwise. 2210 const Expr *StoreE = AssignE ? AssignE : LocationE; 2211 2212 // Evaluate the location (checks for bad dereferences). 2213 ExplodedNodeSet Tmp; 2214 evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false); 2215 2216 if (Tmp.empty()) 2217 return; 2218 2219 if (location.isUndef()) 2220 return; 2221 2222 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) 2223 evalBind(Dst, StoreE, *NI, location, Val, false); 2224 } 2225 2226 void ExprEngine::evalLoad(ExplodedNodeSet &Dst, 2227 const Expr *NodeEx, 2228 const Expr *BoundEx, 2229 ExplodedNode *Pred, 2230 ProgramStateRef state, 2231 SVal location, 2232 const ProgramPointTag *tag, 2233 QualType LoadTy) 2234 { 2235 assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc."); 2236 2237 // Are we loading from a region? This actually results in two loads; one 2238 // to fetch the address of the referenced value and one to fetch the 2239 // referenced value. 2240 if (const TypedValueRegion *TR = 2241 dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) { 2242 2243 QualType ValTy = TR->getValueType(); 2244 if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) { 2245 static SimpleProgramPointTag 2246 loadReferenceTag(TagProviderName, "Load Reference"); 2247 ExplodedNodeSet Tmp; 2248 evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state, 2249 location, &loadReferenceTag, 2250 getContext().getPointerType(RT->getPointeeType())); 2251 2252 // Perform the load from the referenced value.
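// For example, with
//
//   int x = 1;
//   int &r = x;
//   int y = r;
//
// the evalLoadCommon call above fetches the location stored in r's region
// (the address of 'x'); the loop below then loads through that location to
// obtain the value of 'x' itself.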
2253 for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end() ; I!=E; ++I) { 2254 state = (*I)->getState(); 2255 location = state->getSVal(BoundEx, (*I)->getLocationContext()); 2256 evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy); 2257 } 2258 return; 2259 } 2260 } 2261 2262 evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy); 2263 } 2264 2265 void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst, 2266 const Expr *NodeEx, 2267 const Expr *BoundEx, 2268 ExplodedNode *Pred, 2269 ProgramStateRef state, 2270 SVal location, 2271 const ProgramPointTag *tag, 2272 QualType LoadTy) { 2273 assert(NodeEx); 2274 assert(BoundEx); 2275 // Evaluate the location (checks for bad dereferences). 2276 ExplodedNodeSet Tmp; 2277 evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true); 2278 if (Tmp.empty()) 2279 return; 2280 2281 StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx); 2282 if (location.isUndef()) 2283 return; 2284 2285 // Proceed with the load. 2286 for (ExplodedNodeSet::iterator NI=Tmp.begin(), NE=Tmp.end(); NI!=NE; ++NI) { 2287 state = (*NI)->getState(); 2288 const LocationContext *LCtx = (*NI)->getLocationContext(); 2289 2290 SVal V = UnknownVal(); 2291 if (location.isValid()) { 2292 if (LoadTy.isNull()) 2293 LoadTy = BoundEx->getType(); 2294 V = state->getSVal(location.castAs<Loc>(), LoadTy); 2295 } 2296 2297 Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag, 2298 ProgramPoint::PostLoadKind); 2299 } 2300 } 2301 2302 void ExprEngine::evalLocation(ExplodedNodeSet &Dst, 2303 const Stmt *NodeEx, 2304 const Stmt *BoundEx, 2305 ExplodedNode *Pred, 2306 ProgramStateRef state, 2307 SVal location, 2308 const ProgramPointTag *tag, 2309 bool isLoad) { 2310 StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx); 2311 // Early checks for performance reasons. 2312 if (location.isUnknown()) { 2313 return; 2314 } 2315 2316 ExplodedNodeSet Src; 2317 BldrTop.takeNodes(Pred); 2318 StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx); 2319 if (Pred->getState() != state) { 2320 // Associate this new state with an ExplodedNode. 2321 // FIXME: If I pass null tag, the graph is incorrect, e.g. for 2322 // int *p; 2323 // p = 0; 2324 // *p = 0xDEADBEEF; 2325 // "p = 0" is not noted as "Null pointer value stored to 'p'" but 2326 // instead "int *p" is noted as 2327 // "Variable 'p' initialized to a null pointer value" 2328 2329 static SimpleProgramPointTag tag(TagProviderName, "Location"); 2330 Bldr.generateNode(NodeEx, Pred, state, &tag); 2331 } 2332 ExplodedNodeSet Tmp; 2333 getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad, 2334 NodeEx, BoundEx, *this); 2335 BldrTop.addNodes(Tmp); 2336 } 2337 2338 std::pair<const ProgramPointTag *, const ProgramPointTag*> 2339 ExprEngine::geteagerlyAssumeBinOpBifurcationTags() { 2340 static SimpleProgramPointTag 2341 eagerlyAssumeBinOpBifurcationTrue(TagProviderName, 2342 "Eagerly Assume True"), 2343 eagerlyAssumeBinOpBifurcationFalse(TagProviderName, 2344 "Eagerly Assume False"); 2345 return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue, 2346 &eagerlyAssumeBinOpBifurcationFalse); 2347 } 2348 2349 void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst, 2350 ExplodedNodeSet &Src, 2351 const Expr *Ex) { 2352 StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx); 2353 2354 for (ExplodedNodeSet::iterator I=Src.begin(), E=Src.end(); I!=E; ++I) { 2355 ExplodedNode *Pred = *I; 2356 // Test if the previous node was the same expression.
This can happen 2357 // when the expression fails to evaluate to anything meaningful and 2358 // (as an optimization) we don't generate a node. 2359 ProgramPoint P = Pred->getLocation(); 2360 if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) { 2361 continue; 2362 } 2363 2364 ProgramStateRef state = Pred->getState(); 2365 SVal V = state->getSVal(Ex, Pred->getLocationContext()); 2366 Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>(); 2367 if (SEV && SEV->isExpression()) { 2368 const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags = 2369 geteagerlyAssumeBinOpBifurcationTags(); 2370 2371 ProgramStateRef StateTrue, StateFalse; 2372 std::tie(StateTrue, StateFalse) = state->assume(*SEV); 2373 2374 // First assume that the condition is true. 2375 if (StateTrue) { 2376 SVal Val = svalBuilder.makeIntVal(1U, Ex->getType()); 2377 StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val); 2378 Bldr.generateNode(Ex, Pred, StateTrue, tags.first); 2379 } 2380 2381 // Next, assume that the condition is false. 2382 if (StateFalse) { 2383 SVal Val = svalBuilder.makeIntVal(0U, Ex->getType()); 2384 StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val); 2385 Bldr.generateNode(Ex, Pred, StateFalse, tags.second); 2386 } 2387 } 2388 } 2389 } 2390 2391 void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred, 2392 ExplodedNodeSet &Dst) { 2393 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2394 // We have processed both the inputs and the outputs. All of the outputs 2395 // should evaluate to Locs. Nuke all of their values. 2396 2397 // FIXME: Some day in the future it would be nice to allow a "plug-in" 2398 // which interprets the inline asm and stores proper results in the 2399 // outputs. 2400 2401 ProgramStateRef state = Pred->getState(); 2402 2403 for (const Expr *O : A->outputs()) { 2404 SVal X = state->getSVal(O, Pred->getLocationContext()); 2405 assert (!X.getAs<NonLoc>()); // Should be an Lval, or unknown, undef. 2406 2407 if (Optional<Loc> LV = X.getAs<Loc>()) 2408 state = state->bindLoc(*LV, UnknownVal()); 2409 } 2410 2411 Bldr.generateNode(A, Pred, state); 2412 } 2413 2414 void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred, 2415 ExplodedNodeSet &Dst) { 2416 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 2417 Bldr.generateNode(A, Pred, Pred->getState()); 2418 } 2419 2420 //===----------------------------------------------------------------------===// 2421 // Visualization. 2422 //===----------------------------------------------------------------------===// 2423 2424 #ifndef NDEBUG 2425 static ExprEngine* GraphPrintCheckerState; 2426 static SourceManager* GraphPrintSourceManager; 2427 2428 namespace llvm { 2429 template<> 2430 struct DOTGraphTraits<ExplodedNode*> : 2431 public DefaultDOTGraphTraits { 2432 2433 DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {} 2434 2435 // FIXME: Since we do not cache error nodes in ExprEngine now, this does not 2436 // work. 2437 static std::string getNodeAttributes(const ExplodedNode *N, void*) { 2438 2439 #if 0 2440 // FIXME: Replace with a general scheme to tell if the node is 2441 // an error node. 
2442 if (GraphPrintCheckerState->isImplicitNullDeref(N) || 2443 GraphPrintCheckerState->isExplicitNullDeref(N) || 2444 GraphPrintCheckerState->isUndefDeref(N) || 2445 GraphPrintCheckerState->isUndefStore(N) || 2446 GraphPrintCheckerState->isUndefControlFlow(N) || 2447 GraphPrintCheckerState->isUndefResult(N) || 2448 GraphPrintCheckerState->isBadCall(N) || 2449 GraphPrintCheckerState->isUndefArg(N)) 2450 return "color=\"red\",style=\"filled\""; 2451 2452 if (GraphPrintCheckerState->isNoReturnCall(N)) 2453 return "color=\"blue\",style=\"filled\""; 2454 #endif 2455 return ""; 2456 } 2457 2458 static void printLocation(raw_ostream &Out, SourceLocation SLoc) { 2459 if (SLoc.isFileID()) { 2460 Out << "\\lline=" 2461 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2462 << " col=" 2463 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc) 2464 << "\\l"; 2465 } 2466 } 2467 2468 static std::string getNodeLabel(const ExplodedNode *N, void*){ 2469 2470 std::string sbuf; 2471 llvm::raw_string_ostream Out(sbuf); 2472 2473 // Program Location. 2474 ProgramPoint Loc = N->getLocation(); 2475 2476 switch (Loc.getKind()) { 2477 case ProgramPoint::BlockEntranceKind: { 2478 Out << "Block Entrance: B" 2479 << Loc.castAs<BlockEntrance>().getBlock()->getBlockID(); 2480 if (const NamedDecl *ND = 2481 dyn_cast<NamedDecl>(Loc.getLocationContext()->getDecl())) { 2482 Out << " ("; 2483 ND->printName(Out); 2484 Out << ")"; 2485 } 2486 break; 2487 } 2488 2489 case ProgramPoint::BlockExitKind: 2490 assert (false); 2491 break; 2492 2493 case ProgramPoint::CallEnterKind: 2494 Out << "CallEnter"; 2495 break; 2496 2497 case ProgramPoint::CallExitBeginKind: 2498 Out << "CallExitBegin"; 2499 break; 2500 2501 case ProgramPoint::CallExitEndKind: 2502 Out << "CallExitEnd"; 2503 break; 2504 2505 case ProgramPoint::PostStmtPurgeDeadSymbolsKind: 2506 Out << "PostStmtPurgeDeadSymbols"; 2507 break; 2508 2509 case ProgramPoint::PreStmtPurgeDeadSymbolsKind: 2510 Out << "PreStmtPurgeDeadSymbols"; 2511 break; 2512 2513 case ProgramPoint::EpsilonKind: 2514 Out << "Epsilon Point"; 2515 break; 2516 2517 case ProgramPoint::PreImplicitCallKind: { 2518 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2519 Out << "PreCall: "; 2520 2521 // FIXME: Get proper printing options. 2522 PC.getDecl()->print(Out, LangOptions()); 2523 printLocation(Out, PC.getLocation()); 2524 break; 2525 } 2526 2527 case ProgramPoint::PostImplicitCallKind: { 2528 ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>(); 2529 Out << "PostCall: "; 2530 2531 // FIXME: Get proper printing options. 2532 PC.getDecl()->print(Out, LangOptions()); 2533 printLocation(Out, PC.getLocation()); 2534 break; 2535 } 2536 2537 case ProgramPoint::PostInitializerKind: { 2538 Out << "PostInitializer: "; 2539 const CXXCtorInitializer *Init = 2540 Loc.castAs<PostInitializer>().getInitializer(); 2541 if (const FieldDecl *FD = Init->getAnyMember()) 2542 Out << *FD; 2543 else { 2544 QualType Ty = Init->getTypeSourceInfo()->getType(); 2545 Ty = Ty.getLocalUnqualifiedType(); 2546 LangOptions LO; // FIXME. 2547 Ty.print(Out, LO); 2548 } 2549 break; 2550 } 2551 2552 case ProgramPoint::BlockEdgeKind: { 2553 const BlockEdge &E = Loc.castAs<BlockEdge>(); 2554 Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B" 2555 << E.getDst()->getBlockID() << ')'; 2556 2557 if (const Stmt *T = E.getSrc()->getTerminator()) { 2558 SourceLocation SLoc = T->getLocStart(); 2559 2560 Out << "\\|Terminator: "; 2561 LangOptions LO; // FIXME. 
2562 E.getSrc()->printTerminator(Out, LO); 2563 2564 if (SLoc.isFileID()) { 2565 Out << "\\lline=" 2566 << GraphPrintSourceManager->getExpansionLineNumber(SLoc) 2567 << " col=" 2568 << GraphPrintSourceManager->getExpansionColumnNumber(SLoc); 2569 } 2570 2571 if (isa<SwitchStmt>(T)) { 2572 const Stmt *Label = E.getDst()->getLabel(); 2573 2574 if (Label) { 2575 if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) { 2576 Out << "\\lcase "; 2577 LangOptions LO; // FIXME. 2578 if (C->getLHS()) 2579 C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO)); 2580 2581 if (const Stmt *RHS = C->getRHS()) { 2582 Out << " .. "; 2583 RHS->printPretty(Out, nullptr, PrintingPolicy(LO)); 2584 } 2585 2586 Out << ":"; 2587 } 2588 else { 2589 assert (isa<DefaultStmt>(Label)); 2590 Out << "\\ldefault:"; 2591 } 2592 } 2593 else 2594 Out << "\\l(implicit) default:"; 2595 } 2596 else if (isa<IndirectGotoStmt>(T)) { 2597 // FIXME 2598 } 2599 else { 2600 Out << "\\lCondition: "; 2601 if (*E.getSrc()->succ_begin() == E.getDst()) 2602 Out << "true"; 2603 else 2604 Out << "false"; 2605 } 2606 2607 Out << "\\l"; 2608 } 2609 2610 #if 0 2611 // FIXME: Replace with a general scheme to determine 2612 // the name of the check. 2613 if (GraphPrintCheckerState->isUndefControlFlow(N)) { 2614 Out << "\\|Control-flow based on\\lUndefined value.\\l"; 2615 } 2616 #endif 2617 break; 2618 } 2619 2620 default: { 2621 const Stmt *S = Loc.castAs<StmtPoint>().getStmt(); 2622 assert(S != nullptr && "Expecting non-null Stmt"); 2623 2624 Out << S->getStmtClassName() << ' ' << (const void*) S << ' '; 2625 LangOptions LO; // FIXME. 2626 S->printPretty(Out, nullptr, PrintingPolicy(LO)); 2627 printLocation(Out, S->getLocStart()); 2628 2629 if (Loc.getAs<PreStmt>()) 2630 Out << "\\lPreStmt\\l;"; 2631 else if (Loc.getAs<PostLoad>()) 2632 Out << "\\lPostLoad\\l;"; 2633 else if (Loc.getAs<PostStore>()) 2634 Out << "\\lPostStore\\l"; 2635 else if (Loc.getAs<PostLValue>()) 2636 Out << "\\lPostLValue\\l"; 2637 2638 #if 0 2639 // FIXME: Replace with a general scheme to determine 2640 // the name of the check. 
2641 if (GraphPrintCheckerState->isImplicitNullDeref(N)) 2642 Out << "\\|Implicit-Null Dereference.\\l"; 2643 else if (GraphPrintCheckerState->isExplicitNullDeref(N)) 2644 Out << "\\|Explicit-Null Dereference.\\l"; 2645 else if (GraphPrintCheckerState->isUndefDeref(N)) 2646 Out << "\\|Dereference of undefialied value.\\l"; 2647 else if (GraphPrintCheckerState->isUndefStore(N)) 2648 Out << "\\|Store to Undefined Loc."; 2649 else if (GraphPrintCheckerState->isUndefResult(N)) 2650 Out << "\\|Result of operation is undefined."; 2651 else if (GraphPrintCheckerState->isNoReturnCall(N)) 2652 Out << "\\|Call to function marked \"noreturn\"."; 2653 else if (GraphPrintCheckerState->isBadCall(N)) 2654 Out << "\\|Call to NULL/Undefined."; 2655 else if (GraphPrintCheckerState->isUndefArg(N)) 2656 Out << "\\|Argument in call is undefined"; 2657 #endif 2658 2659 break; 2660 } 2661 } 2662 2663 ProgramStateRef state = N->getState(); 2664 Out << "\\|StateID: " << (const void*) state.get() 2665 << " NodeID: " << (const void*) N << "\\|"; 2666 state->printDOT(Out); 2667 2668 Out << "\\l"; 2669 2670 if (const ProgramPointTag *tag = Loc.getTag()) { 2671 Out << "\\|Tag: " << tag->getTagDescription(); 2672 Out << "\\l"; 2673 } 2674 return Out.str(); 2675 } 2676 }; 2677 } // end llvm namespace 2678 #endif 2679 2680 void ExprEngine::ViewGraph(bool trim) { 2681 #ifndef NDEBUG 2682 if (trim) { 2683 std::vector<const ExplodedNode*> Src; 2684 2685 // Flush any outstanding reports to make sure we cover all the nodes. 2686 // This does not cause them to get displayed. 2687 for (BugReporter::iterator I=BR.begin(), E=BR.end(); I!=E; ++I) 2688 const_cast<BugType*>(*I)->FlushReports(BR); 2689 2690 // Iterate through the reports and get their nodes. 2691 for (BugReporter::EQClasses_iterator 2692 EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) { 2693 ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode()); 2694 if (N) Src.push_back(N); 2695 } 2696 2697 ViewGraph(Src); 2698 } 2699 else { 2700 GraphPrintCheckerState = this; 2701 GraphPrintSourceManager = &getContext().getSourceManager(); 2702 2703 llvm::ViewGraph(*G.roots_begin(), "ExprEngine"); 2704 2705 GraphPrintCheckerState = nullptr; 2706 GraphPrintSourceManager = nullptr; 2707 } 2708 #endif 2709 } 2710 2711 void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) { 2712 #ifndef NDEBUG 2713 GraphPrintCheckerState = this; 2714 GraphPrintSourceManager = &getContext().getSourceManager(); 2715 2716 std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes)); 2717 2718 if (!TrimmedG.get()) 2719 llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n"; 2720 else 2721 llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine"); 2722 2723 GraphPrintCheckerState = nullptr; 2724 GraphPrintSourceManager = nullptr; 2725 #endif 2726 } 2727