//=-- ExprEngine.cpp - Path-Sensitive Expression-Level Dataflow ---*- C++ -*-=
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a meta-engine for path-sensitive dataflow analysis that
//  is built on GREngine, but provides the boilerplate to execute transfer
//  functions and build the ExplodedGraph at the expression level.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "PrettyStackTraceLocationContext.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/ParentMap.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/PrettyStackTrace.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "llvm/ADT/ImmutableList.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/raw_ostream.h"

#ifndef NDEBUG
#include "llvm/Support/GraphWriter.h"
#endif

using namespace clang;
using namespace ento;
using llvm::APSInt;

#define DEBUG_TYPE "ExprEngine"

STATISTIC(NumRemoveDeadBindings,
          "The # of times RemoveDeadBindings is called");
STATISTIC(NumMaxBlockCountReached,
          "The # of aborted paths due to reaching the maximum block count in "
          "a top level function");
STATISTIC(NumMaxBlockCountReachedInInlined,
          "The # of aborted paths due to reaching the maximum block count in "
          "an inlined function");
STATISTIC(NumTimesRetriedWithoutInlining,
          "The # of times we re-evaluated a call without inlining");

//===----------------------------------------------------------------------===//
// Engine construction and deletion.
//===----------------------------------------------------------------------===//

static const char* TagProviderName = "ExprEngine";

ExprEngine::ExprEngine(AnalysisManager &mgr, bool gcEnabled,
                       SetOfConstDecls *VisitedCalleesIn,
                       FunctionSummariesTy *FS,
                       InliningModes HowToInlineIn)
  : AMgr(mgr),
    AnalysisDeclContexts(mgr.getAnalysisDeclContextManager()),
    Engine(*this, FS),
    G(Engine.getGraph()),
    StateMgr(getContext(), mgr.getStoreManagerCreator(),
             mgr.getConstraintManagerCreator(), G.getAllocator(),
             this),
    SymMgr(StateMgr.getSymbolManager()),
    svalBuilder(StateMgr.getSValBuilder()),
    currStmtIdx(0), currBldrCtx(nullptr),
    ObjCNoRet(mgr.getASTContext()),
    ObjCGCEnabled(gcEnabled), BR(mgr, *this),
    VisitedCallees(VisitedCalleesIn),
    HowToInline(HowToInlineIn)
{
  unsigned TrimInterval = mgr.options.getGraphTrimInterval();
  if (TrimInterval != 0) {
    // Enable eager node reclamation when constructing the ExplodedGraph.
    G.enableNodeReclamation(TrimInterval);
  }
}

ExprEngine::~ExprEngine() {
  BR.FlushReports();
}

//===----------------------------------------------------------------------===//
// Utility methods.
//===----------------------------------------------------------------------===//

ProgramStateRef ExprEngine::getInitialState(const LocationContext *InitLoc) {
  ProgramStateRef state = StateMgr.getInitialState(InitLoc);
  const Decl *D = InitLoc->getDecl();

  // Preconditions.
  // FIXME: It would be nice if we had a more general mechanism to add
  // such preconditions. Some day.
  do {

    if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
      // Precondition: the first argument of 'main' is an integer guaranteed
      // to be > 0.
      const IdentifierInfo *II = FD->getIdentifier();
      if (!II || !(II->getName() == "main" && FD->getNumParams() > 0))
        break;

      const ParmVarDecl *PD = FD->getParamDecl(0);
      QualType T = PD->getType();
      const BuiltinType *BT = dyn_cast<BuiltinType>(T);
      if (!BT || !BT->isInteger())
        break;

      const MemRegion *R = state->getRegion(PD, InitLoc);
      if (!R)
        break;

      SVal V = state->getSVal(loc::MemRegionVal(R));
      SVal Constraint_untested = evalBinOp(state, BO_GT, V,
                                           svalBuilder.makeZeroVal(T),
                                           svalBuilder.getConditionType());

      Optional<DefinedOrUnknownSVal> Constraint =
          Constraint_untested.getAs<DefinedOrUnknownSVal>();

      if (!Constraint)
        break;

      if (ProgramStateRef newState = state->assume(*Constraint, true))
        state = newState;
    }
    break;
  }
  while (0);

  if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
    // Precondition: 'self' is always non-null upon entry to an Objective-C
    // method.
    const ImplicitParamDecl *SelfD = MD->getSelfDecl();
    const MemRegion *R = state->getRegion(SelfD, InitLoc);
    SVal V = state->getSVal(loc::MemRegionVal(R));

    if (Optional<Loc> LV = V.getAs<Loc>()) {
      // Assume that the pointer value in 'self' is non-null.
      state = state->assume(*LV, true);
      assert(state && "'self' cannot be null");
    }
  }

  if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(D)) {
    if (!MD->isStatic()) {
      // Precondition: 'this' is always non-null upon entry to the
      // top-level function. This is our starting assumption for
      // analyzing an "open" program.
      const StackFrameContext *SFC = InitLoc->getCurrentStackFrame();
      if (SFC->getParent() == nullptr) {
        loc::MemRegionVal L = svalBuilder.getCXXThis(MD, SFC);
        SVal V = state->getSVal(L);
        if (Optional<Loc> LV = V.getAs<Loc>()) {
          state = state->assume(*LV, true);
          assert(state && "'this' cannot be null");
        }
      }
    }
  }

  return state;
}

ProgramStateRef
ExprEngine::createTemporaryRegionIfNeeded(ProgramStateRef State,
                                          const LocationContext *LC,
                                          const Expr *Ex,
                                          const Expr *Result) {
  SVal V = State->getSVal(Ex, LC);
  if (!Result) {
    // If we don't have an explicit result expression, we're in "if needed"
    // mode. Only create a region if the current value is a NonLoc.
    if (!V.getAs<NonLoc>())
      return State;
    Result = Ex;
  } else {
    // We need to create a region no matter what. For sanity, make sure we
    // don't try to stuff a Loc into a non-pointer temporary region.
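    // (Illustrative note: a Loc value is only expected here when the result
    // expression itself has pointer/reference type or is a member pointer;
    // anything else would mean storing an address into a value-typed
    // temporary, which the assert below rejects.)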
    assert(!V.getAs<Loc>() || Loc::isLocType(Result->getType()) ||
           Result->getType()->isMemberPointerType());
  }

  ProgramStateManager &StateMgr = State->getStateManager();
  MemRegionManager &MRMgr = StateMgr.getRegionManager();
  StoreManager &StoreMgr = StateMgr.getStoreManager();

  // We need to be careful about treating a derived type's value as
  // bindings for a base type. Unless we're creating a temporary pointer
  // region, start by stripping and recording base casts.
  SmallVector<const CastExpr *, 4> Casts;
  const Expr *Inner = Ex->IgnoreParens();
  if (!Loc::isLocType(Result->getType())) {
    while (const CastExpr *CE = dyn_cast<CastExpr>(Inner)) {
      if (CE->getCastKind() == CK_DerivedToBase ||
          CE->getCastKind() == CK_UncheckedDerivedToBase)
        Casts.push_back(CE);
      else if (CE->getCastKind() != CK_NoOp)
        break;

      Inner = CE->getSubExpr()->IgnoreParens();
    }
  }

  // Create a temporary object region for the inner expression (which may have
  // a more derived type) and bind the value into it.
  const TypedValueRegion *TR = nullptr;
  if (const MaterializeTemporaryExpr *MT =
          dyn_cast<MaterializeTemporaryExpr>(Result)) {
    StorageDuration SD = MT->getStorageDuration();
    // If this object is bound to a reference with static storage duration, we
    // put it in a different region to prevent "address leakage" warnings.
    if (SD == SD_Static || SD == SD_Thread)
      TR = MRMgr.getCXXStaticTempObjectRegion(Inner);
  }
  if (!TR)
    TR = MRMgr.getCXXTempObjectRegion(Inner, LC);

  SVal Reg = loc::MemRegionVal(TR);

  if (V.isUnknown())
    V = getSValBuilder().conjureSymbolVal(Result, LC, TR->getValueType(),
                                          currBldrCtx->blockCount());
  State = State->bindLoc(Reg, V);

  // Re-apply the casts (from innermost to outermost) for type sanity.
  for (SmallVectorImpl<const CastExpr *>::reverse_iterator I = Casts.rbegin(),
                                                           E = Casts.rend();
       I != E; ++I) {
    Reg = StoreMgr.evalDerivedToBase(Reg, *I);
  }

  State = State->BindExpr(Result, LC, Reg);
  return State;
}

//===----------------------------------------------------------------------===//
// Top-level transfer function logic (Dispatcher).
//===----------------------------------------------------------------------===//

/// evalAssume - Called by ConstraintManager. Used to call checker-specific
/// logic for handling assumptions on symbolic values.
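/// For example, when a branch causes the engine to assume that a pointer
/// symbol is non-null, checkers that track per-symbol state get a chance here
/// to update or prune their own bookkeeping for that symbol.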
ProgramStateRef ExprEngine::processAssume(ProgramStateRef state,
                                          SVal cond, bool assumption) {
  return getCheckerManager().runCheckersForEvalAssume(state, cond, assumption);
}

bool ExprEngine::wantsRegionChangeUpdate(ProgramStateRef state) {
  return getCheckerManager().wantsRegionChangeUpdate(state);
}

ProgramStateRef
ExprEngine::processRegionChanges(ProgramStateRef state,
                                 const InvalidatedSymbols *invalidated,
                                 ArrayRef<const MemRegion *> Explicits,
                                 ArrayRef<const MemRegion *> Regions,
                                 const CallEvent *Call) {
  return getCheckerManager().runCheckersForRegionChanges(state, invalidated,
                                                         Explicits, Regions,
                                                         Call);
}

void ExprEngine::printState(raw_ostream &Out, ProgramStateRef State,
                            const char *NL, const char *Sep) {
  getCheckerManager().runCheckersForPrintState(Out, State, NL, Sep);
}

void ExprEngine::processEndWorklist(bool hasWorkRemaining) {
  getCheckerManager().runCheckersForEndAnalysis(G, BR, *this);
}

void ExprEngine::processCFGElement(const CFGElement E, ExplodedNode *Pred,
                                   unsigned StmtIdx, NodeBuilderContext *Ctx) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currStmtIdx = StmtIdx;
  currBldrCtx = Ctx;

  switch (E.getKind()) {
    case CFGElement::Statement:
      ProcessStmt(const_cast<Stmt*>(E.castAs<CFGStmt>().getStmt()), Pred);
      return;
    case CFGElement::Initializer:
      ProcessInitializer(E.castAs<CFGInitializer>().getInitializer(), Pred);
      return;
    case CFGElement::NewAllocator:
      ProcessNewAllocator(E.castAs<CFGNewAllocator>().getAllocatorExpr(),
                          Pred);
      return;
    case CFGElement::AutomaticObjectDtor:
    case CFGElement::DeleteDtor:
    case CFGElement::BaseDtor:
    case CFGElement::MemberDtor:
    case CFGElement::TemporaryDtor:
      ProcessImplicitDtor(E.castAs<CFGImplicitDtor>(), Pred);
      return;
  }
}

static bool shouldRemoveDeadBindings(AnalysisManager &AMgr,
                                     const CFGStmt S,
                                     const ExplodedNode *Pred,
                                     const LocationContext *LC) {

  // Are we never purging state values?
  if (AMgr.options.AnalysisPurgeOpt == PurgeNone)
    return false;

  // Is this the beginning of a basic block?
  if (Pred->getLocation().getAs<BlockEntrance>())
    return true;

  // Is this on a non-expression?
  if (!isa<Expr>(S.getStmt()))
    return true;

  // Run before processing a call.
  if (CallEvent::isCallStmt(S.getStmt()))
    return true;

  // Is this an expression that is consumed by another expression? If so,
  // postpone cleaning out the state.
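  // (For example, in 'foo(x + 1)' the sub-expression 'x + 1' is consumed by
  // the enclosing call, so its binding must stay alive until the full
  // expression has been processed.)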
  ParentMap &PM = LC->getAnalysisDeclContext()->getParentMap();
  return !PM.isConsumedExpr(cast<Expr>(S.getStmt()));
}

void ExprEngine::removeDead(ExplodedNode *Pred, ExplodedNodeSet &Out,
                            const Stmt *ReferenceStmt,
                            const LocationContext *LC,
                            const Stmt *DiagnosticStmt,
                            ProgramPoint::Kind K) {
  assert((K == ProgramPoint::PreStmtPurgeDeadSymbolsKind ||
          ReferenceStmt == nullptr || isa<ReturnStmt>(ReferenceStmt))
         && "PostStmt is not generally supported by the SymbolReaper yet");
  assert(LC && "Must pass the current (or expiring) LocationContext");

  if (!DiagnosticStmt) {
    DiagnosticStmt = ReferenceStmt;
    assert(DiagnosticStmt && "Required for clearing a LocationContext");
  }

  NumRemoveDeadBindings++;
  ProgramStateRef CleanedState = Pred->getState();

  // LC is the location context being destroyed, but SymbolReaper wants a
  // location context that is still live. (If this is the top-level stack
  // frame, this will be null.)
  if (!ReferenceStmt) {
    assert(K == ProgramPoint::PostStmtPurgeDeadSymbolsKind &&
           "Use PostStmtPurgeDeadSymbolsKind for clearing a LocationContext");
    LC = LC->getParent();
  }

  const StackFrameContext *SFC = LC ? LC->getCurrentStackFrame() : nullptr;
  SymbolReaper SymReaper(SFC, ReferenceStmt, SymMgr, getStoreManager());

  getCheckerManager().runCheckersForLiveSymbols(CleanedState, SymReaper);

  // Create a state in which dead bindings are removed from the environment
  // and the store. TODO: The function should just return new env and store,
  // not a new state.
  CleanedState = StateMgr.removeDeadBindings(CleanedState, SFC, SymReaper);

  // Process any special transfer function for dead symbols.
  // A tag to track convenience transitions, which can be removed at cleanup.
  static SimpleProgramPointTag cleanupTag(TagProviderName, "Clean Node");
  if (!SymReaper.hasDeadSymbols()) {
    // Generate a CleanedNode that has the environment and store cleaned
    // up. Since no symbols are dead, we can optimize and not clean out
    // the constraint manager.
    StmtNodeBuilder Bldr(Pred, Out, *currBldrCtx);
    Bldr.generateNode(DiagnosticStmt, Pred, CleanedState, &cleanupTag, K);

  } else {
    // Call checkers with the non-cleaned state so that they can query the
    // values of the soon-to-be-dead symbols.
    ExplodedNodeSet CheckedSet;
    getCheckerManager().runCheckersForDeadSymbols(CheckedSet, Pred, SymReaper,
                                                  DiagnosticStmt, *this, K);

    // For each node in CheckedSet, generate CleanedNodes that have the
    // environment, the store, and the constraints cleaned up but have the
    // user-supplied states as the predecessors.
    StmtNodeBuilder Bldr(CheckedSet, Out, *currBldrCtx);
    for (ExplodedNodeSet::const_iterator
           I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) {
      ProgramStateRef CheckerState = (*I)->getState();

      // The constraint manager has not been cleaned up yet, so clean up now.
      CheckerState = getConstraintManager().removeDeadBindings(CheckerState,
                                                               SymReaper);

      assert(StateMgr.haveEqualEnvironments(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Environment as a part of "
             "checkDeadSymbols processing.");
      assert(StateMgr.haveEqualStores(CheckerState, Pred->getState()) &&
             "Checkers are not allowed to modify the Store as a part of "
             "checkDeadSymbols processing.");

      // Create a state based on CleanedState with CheckerState GDM and
      // generate a transition to that state.
      ProgramStateRef CleanedCheckerSt =
          StateMgr.getPersistentStateWithGDM(CleanedState, CheckerState);
      Bldr.generateNode(DiagnosticStmt, *I, CleanedCheckerSt, &cleanupTag, K);
    }
  }
}

void ExprEngine::ProcessStmt(const CFGStmt S,
                             ExplodedNode *Pred) {
  // Reclaim any unnecessary nodes in the ExplodedGraph.
  G.reclaimRecentlyAllocatedNodes();

  const Stmt *currStmt = S.getStmt();
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                currStmt->getLocStart(),
                                "Error evaluating statement");

  // Remove dead bindings and symbols.
  ExplodedNodeSet CleanedStates;
  if (shouldRemoveDeadBindings(AMgr, S, Pred, Pred->getLocationContext())) {
    removeDead(Pred, CleanedStates, currStmt, Pred->getLocationContext());
  } else
    CleanedStates.Add(Pred);

  // Visit the statement.
  ExplodedNodeSet Dst;
  for (ExplodedNodeSet::iterator I = CleanedStates.begin(),
                                 E = CleanedStates.end(); I != E; ++I) {
    ExplodedNodeSet DstI;
    // Visit the statement.
    Visit(currStmt, *I, DstI);
    Dst.insert(DstI);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessInitializer(const CFGInitializer Init,
                                    ExplodedNode *Pred) {
  const CXXCtorInitializer *BMI = Init.getInitializer();

  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                BMI->getSourceLocation(),
                                "Error evaluating initializer");

  // We don't clean up dead bindings here.
  const StackFrameContext *stackFrame =
      cast<StackFrameContext>(Pred->getLocationContext());
  const CXXConstructorDecl *decl =
      cast<CXXConstructorDecl>(stackFrame->getDecl());

  ProgramStateRef State = Pred->getState();
  SVal thisVal = State->getSVal(svalBuilder.getCXXThis(decl, stackFrame));

  ExplodedNodeSet Tmp(Pred);
  SVal FieldLoc;

  // Evaluate the initializer, if necessary
  if (BMI->isAnyMemberInitializer()) {
    // Constructors build the object directly in the field,
    // but non-objects must be copied in from the initializer.
    const Expr *Init = BMI->getInit()->IgnoreImplicit();
    if (!isa<CXXConstructExpr>(Init)) {
      const ValueDecl *Field;
      if (BMI->isIndirectMemberInitializer()) {
        Field = BMI->getIndirectMember();
        FieldLoc = State->getLValue(BMI->getIndirectMember(), thisVal);
      } else {
        Field = BMI->getMember();
        FieldLoc = State->getLValue(BMI->getMember(), thisVal);
      }

      SVal InitVal;
      if (BMI->getNumArrayIndices() > 0) {
        // Handle arrays of trivial type. We can represent this with a
        // primitive load/copy from the base array region.
        const ArraySubscriptExpr *ASE;
        while ((ASE = dyn_cast<ArraySubscriptExpr>(Init)))
          Init = ASE->getBase()->IgnoreImplicit();

        SVal LValue = State->getSVal(Init, stackFrame);
        if (Optional<Loc> LValueLoc = LValue.getAs<Loc>())
          InitVal = State->getSVal(*LValueLoc);

        // If we fail to get the value for some reason, use a symbolic value.
        if (InitVal.isUnknownOrUndef()) {
          SValBuilder &SVB = getSValBuilder();
          InitVal = SVB.conjureSymbolVal(BMI->getInit(), stackFrame,
                                         Field->getType(),
                                         currBldrCtx->blockCount());
        }
      } else {
        InitVal = State->getSVal(BMI->getInit(), stackFrame);
      }

      assert(Tmp.size() == 1 && "have not generated any new nodes yet");
      assert(*Tmp.begin() == Pred && "have not generated any new nodes yet");
      Tmp.clear();

      PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
      evalBind(Tmp, Init, Pred, FieldLoc, InitVal, /*isInit=*/true, &PP);
    }
  } else {
    assert(BMI->isBaseInitializer() || BMI->isDelegatingInitializer());
    // We already did all the work when visiting the CXXConstructExpr.
  }

  // Construct PostInitializer nodes whether the state changed or not,
  // so that the diagnostics don't get confused.
  PostInitializer PP(BMI, FieldLoc.getAsRegion(), stackFrame);
  ExplodedNodeSet Dst;
  NodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    Bldr.generateNode(PP, N->getState(), N);
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessImplicitDtor(const CFGImplicitDtor D,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  switch (D.getKind()) {
  case CFGElement::AutomaticObjectDtor:
    ProcessAutomaticObjDtor(D.castAs<CFGAutomaticObjDtor>(), Pred, Dst);
    break;
  case CFGElement::BaseDtor:
    ProcessBaseDtor(D.castAs<CFGBaseDtor>(), Pred, Dst);
    break;
  case CFGElement::MemberDtor:
    ProcessMemberDtor(D.castAs<CFGMemberDtor>(), Pred, Dst);
    break;
  case CFGElement::TemporaryDtor:
    ProcessTemporaryDtor(D.castAs<CFGTemporaryDtor>(), Pred, Dst);
    break;
  case CFGElement::DeleteDtor:
    ProcessDeleteDtor(D.castAs<CFGDeleteDtor>(), Pred, Dst);
    break;
  default:
    llvm_unreachable("Unexpected dtor kind.");
  }

  // Enqueue the new nodes onto the work list.
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessNewAllocator(const CXXNewExpr *NE,
                                     ExplodedNode *Pred) {
  ExplodedNodeSet Dst;
  AnalysisManager &AMgr = getAnalysisManager();
  AnalyzerOptions &Opts = AMgr.options;
  // TODO: We're not evaluating allocators for all cases just yet as
  // we're not handling the return value correctly, which causes false
  // positives when the alpha.cplusplus.NewDeleteLeaks check is on.
  if (Opts.mayInlineCXXAllocator())
    VisitCXXNewAllocatorCall(NE, Pred, Dst);
  else {
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    const LocationContext *LCtx = Pred->getLocationContext();
    PostImplicitCall PP(NE->getOperatorNew(), NE->getLocStart(), LCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
  }
  Engine.enqueue(Dst, currBldrCtx->getBlock(), currStmtIdx);
}

void ExprEngine::ProcessAutomaticObjDtor(const CFGAutomaticObjDtor Dtor,
                                         ExplodedNode *Pred,
                                         ExplodedNodeSet &Dst) {
  const VarDecl *varDecl = Dtor.getVarDecl();
  QualType varType = varDecl->getType();

  ProgramStateRef state = Pred->getState();
  SVal dest = state->getLValue(varDecl, Pred->getLocationContext());
  const MemRegion *Region = dest.castAs<loc::MemRegionVal>().getRegion();

  if (const ReferenceType *refType = varType->getAs<ReferenceType>()) {
    varType = refType->getPointeeType();
    Region = state->getSVal(Region).getAsRegion();
  }

  VisitCXXDestructor(varType, Region, Dtor.getTriggerStmt(), /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessDeleteDtor(const CFGDeleteDtor Dtor,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();
  const CXXDeleteExpr *DE = Dtor.getDeleteExpr();
  const Stmt *Arg = DE->getArgument();
  SVal ArgVal = State->getSVal(Arg, LCtx);

  // If the argument to delete is known to be a null value,
  // don't run destructor.
  if (State->isNull(ArgVal).isConstrainedTrue()) {
    QualType DTy = DE->getDestroyedType();
    QualType BTy = getContext().getBaseElementType(DTy);
    const CXXRecordDecl *RD = BTy->getAsCXXRecordDecl();
    const CXXDestructorDecl *Dtor = RD->getDestructor();

    PostImplicitCall PP(Dtor, DE->getLocStart(), LCtx);
    NodeBuilder Bldr(Pred, Dst, *currBldrCtx);
    Bldr.generateNode(PP, Pred->getState(), Pred);
    return;
  }

  VisitCXXDestructor(DE->getDestroyedType(),
                     ArgVal.getAsRegion(),
                     DE, /*IsBase=*/ false,
                     Pred, Dst);
}

void ExprEngine::ProcessBaseDtor(const CFGBaseDtor D,
                                 ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisPtr = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal ThisVal = Pred->getState()->getSVal(ThisPtr);

  // Create the base object region.
  const CXXBaseSpecifier *Base = D.getBaseSpecifier();
  QualType BaseTy = Base->getType();
  SVal BaseVal = getStoreManager().evalDerivedToBase(ThisVal, BaseTy,
                                                     Base->isVirtual());

  VisitCXXDestructor(BaseTy, BaseVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/ true, Pred, Dst);
}

void ExprEngine::ProcessMemberDtor(const CFGMemberDtor D,
                                   ExplodedNode *Pred, ExplodedNodeSet &Dst) {
  const FieldDecl *Member = D.getFieldDecl();
  ProgramStateRef State = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  const CXXDestructorDecl *CurDtor = cast<CXXDestructorDecl>(LCtx->getDecl());
  Loc ThisVal = getSValBuilder().getCXXThis(CurDtor,
                                            LCtx->getCurrentStackFrame());
  SVal FieldVal =
      State->getLValue(Member, State->getSVal(ThisVal).castAs<Loc>());

  VisitCXXDestructor(Member->getType(),
                     FieldVal.castAs<loc::MemRegionVal>().getRegion(),
                     CurDtor->getBody(), /*IsBase=*/false, Pred, Dst);
}

void ExprEngine::ProcessTemporaryDtor(const CFGTemporaryDtor D,
                                      ExplodedNode *Pred,
                                      ExplodedNodeSet &Dst) {

  QualType varType = D.getBindTemporaryExpr()->getSubExpr()->getType();

  // FIXME: Inlining of temporary destructors is not supported yet anyway, so
  // we just put a NULL region for now. This will need to be changed later.
  VisitCXXDestructor(varType, nullptr, D.getBindTemporaryExpr(),
                     /*IsBase=*/ false, Pred, Dst);
}

void ExprEngine::Visit(const Stmt *S, ExplodedNode *Pred,
                       ExplodedNodeSet &DstTop) {
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                S->getLocStart(),
                                "Error evaluating statement");
  ExplodedNodeSet Dst;
  StmtNodeBuilder Bldr(Pred, DstTop, *currBldrCtx);

  assert(!isa<Expr>(S) || S == cast<Expr>(S)->IgnoreParens());

  switch (S->getStmtClass()) {
    // C++ and ARC stuff we don't support yet.
    case Expr::ObjCIndirectCopyRestoreExprClass:
    case Stmt::CXXDependentScopeMemberExprClass:
    case Stmt::CXXTryStmtClass:
    case Stmt::CXXTypeidExprClass:
    case Stmt::CXXUuidofExprClass:
    case Stmt::MSPropertyRefExprClass:
    case Stmt::CXXUnresolvedConstructExprClass:
    case Stmt::DependentScopeDeclRefExprClass:
    case Stmt::TypeTraitExprClass:
    case Stmt::ArrayTypeTraitExprClass:
    case Stmt::ExpressionTraitExprClass:
    case Stmt::UnresolvedLookupExprClass:
    case Stmt::UnresolvedMemberExprClass:
    case Stmt::CXXNoexceptExprClass:
    case Stmt::PackExpansionExprClass:
    case Stmt::SubstNonTypeTemplateParmPackExprClass:
    case Stmt::FunctionParmPackExprClass:
    case Stmt::SEHTryStmtClass:
    case Stmt::SEHExceptStmtClass:
    case Stmt::SEHLeaveStmtClass:
    case Stmt::LambdaExprClass:
    case Stmt::SEHFinallyStmtClass: {
      const ExplodedNode *node = Bldr.generateSink(S, Pred, Pred->getState());
      Engine.addAbortedBlock(node, currBldrCtx->getBlock());
      break;
    }

    case Stmt::ParenExprClass:
      llvm_unreachable("ParenExprs already handled.");
    case Stmt::GenericSelectionExprClass:
      llvm_unreachable("GenericSelectionExprs already handled.");
    // Cases that should never be evaluated simply because they shouldn't
    // appear in the CFG.
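    // (These constructs are decomposed by the CFG builder into basic blocks,
    // terminators, and sub-expressions, so the engine should never be asked
    // to evaluate them directly.)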
    case Stmt::BreakStmtClass:
    case Stmt::CaseStmtClass:
    case Stmt::CompoundStmtClass:
    case Stmt::ContinueStmtClass:
    case Stmt::CXXForRangeStmtClass:
    case Stmt::DefaultStmtClass:
    case Stmt::DoStmtClass:
    case Stmt::ForStmtClass:
    case Stmt::GotoStmtClass:
    case Stmt::IfStmtClass:
    case Stmt::IndirectGotoStmtClass:
    case Stmt::LabelStmtClass:
    case Stmt::NoStmtClass:
    case Stmt::NullStmtClass:
    case Stmt::SwitchStmtClass:
    case Stmt::WhileStmtClass:
    case Expr::MSDependentExistsStmtClass:
    case Stmt::CapturedStmtClass:
    case Stmt::OMPParallelDirectiveClass:
    case Stmt::OMPSimdDirectiveClass:
    case Stmt::OMPForDirectiveClass:
    case Stmt::OMPSectionsDirectiveClass:
    case Stmt::OMPSectionDirectiveClass:
    case Stmt::OMPSingleDirectiveClass:
    case Stmt::OMPParallelForDirectiveClass:
    case Stmt::OMPParallelSectionsDirectiveClass:
    case Stmt::OMPTaskDirectiveClass:
      llvm_unreachable("Stmt should not be in analyzer evaluation loop");

    case Stmt::ObjCSubscriptRefExprClass:
    case Stmt::ObjCPropertyRefExprClass:
      llvm_unreachable("These are handled by PseudoObjectExpr");

    case Stmt::GNUNullExprClass: {
      // GNU __null is a pointer-width integer, not an actual pointer.
      ProgramStateRef state = Pred->getState();
      state = state->BindExpr(S, Pred->getLocationContext(),
                              svalBuilder.makeIntValWithPtrWidth(0, false));
      Bldr.generateNode(S, Pred, state);
      break;
    }

    case Stmt::ObjCAtSynchronizedStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCAtSynchronizedStmt(cast<ObjCAtSynchronizedStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ExprWithCleanupsClass:
      // Handled due to fully linearised CFG.
      break;

    // Cases not handled yet; but will handle some day.
    case Stmt::DesignatedInitExprClass:
    case Stmt::ExtVectorElementExprClass:
    case Stmt::ImaginaryLiteralClass:
    case Stmt::ObjCAtCatchStmtClass:
    case Stmt::ObjCAtFinallyStmtClass:
    case Stmt::ObjCAtTryStmtClass:
    case Stmt::ObjCAutoreleasePoolStmtClass:
    case Stmt::ObjCEncodeExprClass:
    case Stmt::ObjCIsaExprClass:
    case Stmt::ObjCProtocolExprClass:
    case Stmt::ObjCSelectorExprClass:
    case Stmt::ParenListExprClass:
    case Stmt::PredefinedExprClass:
    case Stmt::ShuffleVectorExprClass:
    case Stmt::ConvertVectorExprClass:
    case Stmt::VAArgExprClass:
    case Stmt::CUDAKernelCallExprClass:
    case Stmt::OpaqueValueExprClass:
    case Stmt::AsTypeExprClass:
    case Stmt::AtomicExprClass:
      // Fall through.

    // Cases we intentionally don't evaluate, since they don't need
    // to be explicitly evaluated.
    case Stmt::AddrLabelExprClass:
    case Stmt::AttributedStmtClass:
    case Stmt::IntegerLiteralClass:
    case Stmt::CharacterLiteralClass:
    case Stmt::ImplicitValueInitExprClass:
    case Stmt::CXXScalarValueInitExprClass:
    case Stmt::CXXBoolLiteralExprClass:
    case Stmt::ObjCBoolLiteralExprClass:
    case Stmt::FloatingLiteralClass:
    case Stmt::SizeOfPackExprClass:
    case Stmt::StringLiteralClass:
    case Stmt::ObjCStringLiteralClass:
    case Stmt::CXXBindTemporaryExprClass:
    case Stmt::CXXPseudoDestructorExprClass:
    case Stmt::SubstNonTypeTemplateParmExprClass:
    case Stmt::CXXNullPtrLiteralExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);
      getCheckerManager().runCheckersForPostStmt(Dst, preVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDefaultArgExprClass:
    case Stmt::CXXDefaultInitExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(PreVisit, Tmp, *currBldrCtx);

      const Expr *ArgE;
      if (const CXXDefaultArgExpr *DefE = dyn_cast<CXXDefaultArgExpr>(S))
        ArgE = DefE->getExpr();
      else if (const CXXDefaultInitExpr *DefE = dyn_cast<CXXDefaultInitExpr>(S))
        ArgE = DefE->getExpr();
      else
        llvm_unreachable("unknown constant wrapper kind");

      bool IsTemporary = false;
      if (const MaterializeTemporaryExpr *MTE =
              dyn_cast<MaterializeTemporaryExpr>(ArgE)) {
        ArgE = MTE->GetTemporaryExpr();
        IsTemporary = true;
      }

      Optional<SVal> ConstantVal = svalBuilder.getConstantVal(ArgE);
      if (!ConstantVal)
        ConstantVal = UnknownVal();

      const LocationContext *LCtx = Pred->getLocationContext();
      for (ExplodedNodeSet::iterator I = PreVisit.begin(), E = PreVisit.end();
           I != E; ++I) {
        ProgramStateRef State = (*I)->getState();
        State = State->BindExpr(S, LCtx, *ConstantVal);
        if (IsTemporary)
          State = createTemporaryRegionIfNeeded(State, LCtx,
                                                cast<Expr>(S),
                                                cast<Expr>(S));
        Bldr2.generateNode(S, *I, State);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    // Cases we evaluate as opaque expressions, conjuring a symbol.
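    // (Their values are opaque to the analyzer, so a fresh symbol of the
    // expression's type stands in for the result below.)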
    case Stmt::CXXStdInitializerListExprClass:
    case Expr::ObjCArrayLiteralClass:
    case Expr::ObjCDictionaryLiteralClass:
    case Expr::ObjCBoxedExprClass: {
      Bldr.takeNodes(Pred);

      ExplodedNodeSet preVisit;
      getCheckerManager().runCheckersForPreStmt(preVisit, Pred, S, *this);

      ExplodedNodeSet Tmp;
      StmtNodeBuilder Bldr2(preVisit, Tmp, *currBldrCtx);

      const Expr *Ex = cast<Expr>(S);
      QualType resultType = Ex->getType();

      for (ExplodedNodeSet::iterator it = preVisit.begin(), et = preVisit.end();
           it != et; ++it) {
        ExplodedNode *N = *it;
        const LocationContext *LCtx = N->getLocationContext();
        SVal result = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx,
                                                   resultType,
                                                   currBldrCtx->blockCount());
        ProgramStateRef state = N->getState()->BindExpr(Ex, LCtx, result);
        Bldr2.generateNode(S, N, state);
      }

      getCheckerManager().runCheckersForPostStmt(Dst, Tmp, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::ArraySubscriptExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalArraySubscriptExpr(cast<ArraySubscriptExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::GCCAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitGCCAsmStmt(cast<GCCAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MSAsmStmtClass:
      Bldr.takeNodes(Pred);
      VisitMSAsmStmt(cast<MSAsmStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BlockExprClass:
      Bldr.takeNodes(Pred);
      VisitBlockExpr(cast<BlockExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryOperatorClass: {
      const BinaryOperator* B = cast<BinaryOperator>(S);
      if (B->isLogicalOp()) {
        Bldr.takeNodes(Pred);
        VisitLogicalExpr(B, Pred, Dst);
        Bldr.addNodes(Dst);
        break;
      }
      else if (B->getOpcode() == BO_Comma) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(B, Pred,
                          state->BindExpr(B, Pred->getLocationContext(),
                                          state->getSVal(B->getRHS(),
                                                         Pred->getLocationContext())));
        break;
      }

      Bldr.takeNodes(Pred);

      if (AMgr.options.eagerlyAssumeBinOpBifurcation &&
          (B->isRelationalOp() || B->isEqualityOp())) {
        ExplodedNodeSet Tmp;
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, cast<Expr>(S));
      }
      else
        VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);

      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXOperatorCallExprClass: {
      const CXXOperatorCallExpr *OCE = cast<CXXOperatorCallExpr>(S);

      // For instance method operators, make sure the 'this' argument has a
      // valid region.
      const Decl *Callee = OCE->getCalleeDecl();
      if (const CXXMethodDecl *MD = dyn_cast_or_null<CXXMethodDecl>(Callee)) {
        if (MD->isInstance()) {
          ProgramStateRef State = Pred->getState();
          const LocationContext *LCtx = Pred->getLocationContext();
          ProgramStateRef NewState =
            createTemporaryRegionIfNeeded(State, LCtx, OCE->getArg(0));
          if (NewState != State) {
            Pred = Bldr.generateNode(OCE, Pred, NewState, /*Tag=*/nullptr,
                                     ProgramPoint::PreStmtKind);
            // Did we cache out?
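            // (generateNode returns null when an equivalent node already
            // exists in the ExplodedGraph, meaning this path has already been
            // explored.)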
            if (!Pred)
              break;
          }
        }
      }
      // FALLTHROUGH
    }
    case Stmt::CallExprClass:
    case Stmt::CXXMemberCallExprClass:
    case Stmt::UserDefinedLiteralClass: {
      Bldr.takeNodes(Pred);
      VisitCallExpr(cast<CallExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXCatchStmtClass: {
      Bldr.takeNodes(Pred);
      VisitCXXCatchStmt(cast<CXXCatchStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXTemporaryObjectExprClass:
    case Stmt::CXXConstructExprClass: {
      Bldr.takeNodes(Pred);
      VisitCXXConstructExpr(cast<CXXConstructExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXNewExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PostVisit;
      VisitCXXNewExpr(cast<CXXNewExpr>(S), Pred, PostVisit);
      getCheckerManager().runCheckersForPostStmt(Dst, PostVisit, S, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXDeleteExprClass: {
      Bldr.takeNodes(Pred);
      ExplodedNodeSet PreVisit;
      const CXXDeleteExpr *CDE = cast<CXXDeleteExpr>(S);
      getCheckerManager().runCheckersForPreStmt(PreVisit, Pred, S, *this);

      for (ExplodedNodeSet::iterator i = PreVisit.begin(),
                                     e = PreVisit.end(); i != e ; ++i)
        VisitCXXDeleteExpr(CDE, *i, Dst);

      Bldr.addNodes(Dst);
      break;
    }
    // FIXME: ChooseExpr is really a constant. We need to fix the CFG so that
    // it does not model ChooseExpr as explicit control-flow.

    case Stmt::ChooseExprClass: { // __builtin_choose_expr
      Bldr.takeNodes(Pred);
      const ChooseExpr *C = cast<ChooseExpr>(S);
      VisitGuardedExpr(C, C->getLHS(), C->getRHS(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CompoundAssignOperatorClass:
      Bldr.takeNodes(Pred);
      VisitBinaryOperator(cast<BinaryOperator>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::CompoundLiteralExprClass:
      Bldr.takeNodes(Pred);
      VisitCompoundLiteralExpr(cast<CompoundLiteralExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass: { // '?' operator
      Bldr.takeNodes(Pred);
      const AbstractConditionalOperator *C
        = cast<AbstractConditionalOperator>(S);
      VisitGuardedExpr(C, C->getTrueExpr(), C->getFalseExpr(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::CXXThisExprClass:
      Bldr.takeNodes(Pred);
      VisitCXXThisExpr(cast<CXXThisExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::DeclRefExprClass: {
      Bldr.takeNodes(Pred);
      const DeclRefExpr *DE = cast<DeclRefExpr>(S);
      VisitCommonDeclRefExpr(DE, DE->getDecl(), Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::DeclStmtClass:
      Bldr.takeNodes(Pred);
      VisitDeclStmt(cast<DeclStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ImplicitCastExprClass:
    case Stmt::CStyleCastExprClass:
    case Stmt::CXXStaticCastExprClass:
    case Stmt::CXXDynamicCastExprClass:
    case Stmt::CXXReinterpretCastExprClass:
    case Stmt::CXXConstCastExprClass:
    case Stmt::CXXFunctionalCastExprClass:
    case Stmt::ObjCBridgedCastExprClass: {
      Bldr.takeNodes(Pred);
      const CastExpr *C = cast<CastExpr>(S);
      // Handle the previsit checks.
      ExplodedNodeSet dstPrevisit;
      getCheckerManager().runCheckersForPreStmt(dstPrevisit, Pred, C, *this);

      // Handle the expression itself.
      ExplodedNodeSet dstExpr;
      for (ExplodedNodeSet::iterator i = dstPrevisit.begin(),
                                     e = dstPrevisit.end(); i != e ; ++i) {
        VisitCast(C, C->getSubExpr(), *i, dstExpr);
      }

      // Handle the postvisit checks.
      getCheckerManager().runCheckersForPostStmt(Dst, dstExpr, C, *this);
      Bldr.addNodes(Dst);
      break;
    }

    case Expr::MaterializeTemporaryExprClass: {
      Bldr.takeNodes(Pred);
      const MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
      CreateCXXTemporaryObject(MTE, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::InitListExprClass:
      Bldr.takeNodes(Pred);
      VisitInitListExpr(cast<InitListExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::MemberExprClass:
      Bldr.takeNodes(Pred);
      VisitMemberExpr(cast<MemberExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCIvarRefExprClass:
      Bldr.takeNodes(Pred);
      VisitLvalObjCIvarRefExpr(cast<ObjCIvarRefExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCForCollectionStmtClass:
      Bldr.takeNodes(Pred);
      VisitObjCForCollectionStmt(cast<ObjCForCollectionStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCMessageExprClass:
      Bldr.takeNodes(Pred);
      VisitObjCMessage(cast<ObjCMessageExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::ObjCAtThrowStmtClass:
    case Stmt::CXXThrowExprClass:
      // FIXME: This is not complete. We basically treat @throw as
      // an abort.
      Bldr.generateSink(S, Pred, Pred->getState());
      break;

    case Stmt::ReturnStmtClass:
      Bldr.takeNodes(Pred);
      VisitReturnStmt(cast<ReturnStmt>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::OffsetOfExprClass:
      Bldr.takeNodes(Pred);
      VisitOffsetOfExpr(cast<OffsetOfExpr>(S), Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::UnaryExprOrTypeTraitExprClass:
      Bldr.takeNodes(Pred);
      VisitUnaryExprOrTypeTraitExpr(cast<UnaryExprOrTypeTraitExpr>(S),
                                    Pred, Dst);
      Bldr.addNodes(Dst);
      break;

    case Stmt::StmtExprClass: {
      const StmtExpr *SE = cast<StmtExpr>(S);

      if (SE->getSubStmt()->body_empty()) {
        // Empty statement expression.
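        // A statement expression with an empty body, e.g. '({ })', produces
        // no value, so there is nothing to bind.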
        assert(SE->getType() == getContext().VoidTy
               && "Empty statement expression must have void type.");
        break;
      }

      if (Expr *LastExpr = dyn_cast<Expr>(*SE->getSubStmt()->body_rbegin())) {
        ProgramStateRef state = Pred->getState();
        Bldr.generateNode(SE, Pred,
                          state->BindExpr(SE, Pred->getLocationContext(),
                                          state->getSVal(LastExpr,
                                                         Pred->getLocationContext())));
      }
      break;
    }

    case Stmt::UnaryOperatorClass: {
      Bldr.takeNodes(Pred);
      const UnaryOperator *U = cast<UnaryOperator>(S);
      if (AMgr.options.eagerlyAssumeBinOpBifurcation && (U->getOpcode() == UO_LNot)) {
        ExplodedNodeSet Tmp;
        VisitUnaryOperator(U, Pred, Tmp);
        evalEagerlyAssumeBinOpBifurcation(Dst, Tmp, U);
      }
      else
        VisitUnaryOperator(U, Pred, Dst);
      Bldr.addNodes(Dst);
      break;
    }

    case Stmt::PseudoObjectExprClass: {
      Bldr.takeNodes(Pred);
      ProgramStateRef state = Pred->getState();
      const PseudoObjectExpr *PE = cast<PseudoObjectExpr>(S);
      if (const Expr *Result = PE->getResultExpr()) {
        SVal V = state->getSVal(Result, Pred->getLocationContext());
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(), V));
      }
      else
        Bldr.generateNode(S, Pred,
                          state->BindExpr(S, Pred->getLocationContext(),
                                          UnknownVal()));

      Bldr.addNodes(Dst);
      break;
    }
  }
}

bool ExprEngine::replayWithoutInlining(ExplodedNode *N,
                                       const LocationContext *CalleeLC) {
  const StackFrameContext *CalleeSF = CalleeLC->getCurrentStackFrame();
  const StackFrameContext *CallerSF = CalleeSF->getParent()->getCurrentStackFrame();
  assert(CalleeSF && CallerSF);
  ExplodedNode *BeforeProcessingCall = nullptr;
  const Stmt *CE = CalleeSF->getCallSite();

  // Find the first node before we started processing the call expression.
  while (N) {
    ProgramPoint L = N->getLocation();
    BeforeProcessingCall = N;
    N = N->pred_empty() ? nullptr : *(N->pred_begin());

    // Skip the nodes corresponding to the inlined code.
    if (L.getLocationContext()->getCurrentStackFrame() != CallerSF)
      continue;
    // We reached the caller. Find the node right before we started
    // processing the call.
    if (L.isPurgeKind())
      continue;
    if (L.getAs<PreImplicitCall>())
      continue;
    if (L.getAs<CallEnter>())
      continue;
    if (Optional<StmtPoint> SP = L.getAs<StmtPoint>())
      if (SP->getStmt() == CE)
        continue;
    break;
  }

  if (!BeforeProcessingCall)
    return false;

  // TODO: Clean up the unneeded nodes.

  // Build an Epsilon node from which we will restart the analysis.
  // Note that CE is permitted to be NULL!
  ProgramPoint NewNodeLoc =
      EpsilonPoint(BeforeProcessingCall->getLocationContext(), CE);
  // Add the special flag to GDM to signal retrying with no inlining.
  // Note, changing the state ensures that we are not going to cache out.
  ProgramStateRef NewNodeState = BeforeProcessingCall->getState();
  NewNodeState =
      NewNodeState->set<ReplayWithoutInlining>(const_cast<Stmt *>(CE));

  // Make the new node a successor of BeforeProcessingCall.
  bool IsNew = false;
  ExplodedNode *NewNode = G.getNode(NewNodeLoc, NewNodeState, false, &IsNew);
  // We cached out at this point. Caching out is common due to us backtracking
  // from the inlined function, which might spawn several paths.
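  // (If an identical replay node already exists, the retry was scheduled on an
  // earlier path, so there is nothing further to enqueue.)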
  if (!IsNew)
    return true;

  NewNode->addPredecessor(BeforeProcessingCall, G);

  // Add the new node to the work list.
  Engine.enqueueStmtNode(NewNode, CalleeSF->getCallSiteBlock(),
                         CalleeSF->getIndex());
  NumTimesRetriedWithoutInlining++;
  return true;
}

/// Block entrance. (Update counters).
void ExprEngine::processCFGBlockEntrance(const BlockEdge &L,
                                         NodeBuilderWithSinks &nodeBuilder,
                                         ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());

  // FIXME: Refactor this into a checker.
  if (nodeBuilder.getContext().blockCount() >= AMgr.options.maxBlockVisitOnPath) {
    static SimpleProgramPointTag tag(TagProviderName, "Block count exceeded");
    const ExplodedNode *Sink =
        nodeBuilder.generateSink(Pred->getState(), Pred, &tag);

    // Check if we stopped at the top level function or not.
    // Root node should have the location context of the top most function.
    const LocationContext *CalleeLC = Pred->getLocation().getLocationContext();
    const LocationContext *CalleeSF = CalleeLC->getCurrentStackFrame();
    const LocationContext *RootLC =
        (*G.roots_begin())->getLocation().getLocationContext();
    if (RootLC->getCurrentStackFrame() != CalleeSF) {
      Engine.FunctionSummaries->markReachedMaxBlockCount(CalleeSF->getDecl());

      // Re-run the call evaluation without inlining it, by storing the
      // no-inlining policy in the state and enqueuing the new work item on
      // the list. Replay should almost never fail. Use the stats to catch it
      // if it does.
      if ((!AMgr.options.NoRetryExhausted &&
           replayWithoutInlining(Pred, CalleeLC)))
        return;
      NumMaxBlockCountReachedInInlined++;
    } else
      NumMaxBlockCountReached++;

    // Mark sink nodes as exhausted (for stats) only if the retry failed.
    Engine.blocksExhausted.push_back(std::make_pair(L, Sink));
  }
}

//===----------------------------------------------------------------------===//
// Branch processing.
//===----------------------------------------------------------------------===//

/// RecoverCastedSymbol - A helper function for ProcessBranch that is used
/// to try to recover some path-sensitivity for casts of symbolic
/// integers that promote their values (which are currently not tracked well).
/// This function returns the SVal bound to Condition->IgnoreCasts if all the
/// cast(s) did was sign-extend the original value.
static SVal RecoverCastedSymbol(ProgramStateManager& StateMgr,
                                ProgramStateRef state,
                                const Stmt *Condition,
                                const LocationContext *LCtx,
                                ASTContext &Ctx) {

  const Expr *Ex = dyn_cast<Expr>(Condition);
  if (!Ex)
    return UnknownVal();

  uint64_t bits = 0;
  bool bitsInit = false;

  while (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
    QualType T = CE->getType();

    if (!T->isIntegralOrEnumerationType())
      return UnknownVal();

    uint64_t newBits = Ctx.getTypeSize(T);
    if (!bitsInit || newBits < bits) {
      bitsInit = true;
      bits = newBits;
    }

    Ex = CE->getSubExpr();
  }

  // We reached a non-cast. Is it a symbolic value?
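  // (For example, in 'if ((long)c)' where 'c' is a char-typed symbol, every
  // cast only widened the value, so the truth of the condition is decided by
  // the symbol bound to 'c' itself.)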
  QualType T = Ex->getType();

  if (!bitsInit || !T->isIntegralOrEnumerationType() ||
      Ctx.getTypeSize(T) > bits)
    return UnknownVal();

  return state->getSVal(Ex, LCtx);
}

#ifndef NDEBUG
static const Stmt *getRightmostLeaf(const Stmt *Condition) {
  while (Condition) {
    const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
    if (!BO || !BO->isLogicalOp()) {
      return Condition;
    }
    Condition = BO->getRHS()->IgnoreParens();
  }
  return nullptr;
}
#endif

// Returns the condition the branch at the end of 'B' depends on and whose
// value has been evaluated within 'B'.
// In most cases, the terminator condition of 'B' will be evaluated fully in
// the last statement of 'B'; in those cases, the resolved condition is the
// given 'Condition'.
// If the condition of the branch is a logical binary operator tree, the CFG is
// optimized: in that case, we know that the expression formed by all but the
// rightmost leaf of the logical binary operator tree must be true, and thus
// the branch condition is at this point equivalent to the truth value of that
// rightmost leaf; the CFG block thus only evaluates this rightmost leaf
// expression in its final statement. As the full condition in that case was
// not evaluated, and is thus not in the SVal cache, we need to use that leaf
// expression to evaluate the truth value of the condition in the current state
// space.
static const Stmt *ResolveCondition(const Stmt *Condition,
                                    const CFGBlock *B) {
  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  const BinaryOperator *BO = dyn_cast<BinaryOperator>(Condition);
  if (!BO || !BO->isLogicalOp())
    return Condition;

  // FIXME: This is a workaround until we handle temporary destructor branches
  // correctly; currently, temporary destructor branches lead to blocks that
  // only have a terminator (and no statements). These blocks violate the
  // invariant this function assumes.
  if (B->getTerminator().isTemporaryDtorsBranch())
    return Condition;

  // For logical operations, we still have the case where some branches
  // use the traditional "merge" approach and others sink the branch
  // directly into the basic blocks representing the logical operation.
  // We need to distinguish between those two cases here.

  // The invariants are still shifting, but it is possible that the
  // last element in a CFGBlock is not a CFGStmt. Look for the last
  // CFGStmt as the value of the condition.
  CFGBlock::const_reverse_iterator I = B->rbegin(), E = B->rend();
  for (; I != E; ++I) {
    CFGElement Elem = *I;
    Optional<CFGStmt> CS = Elem.getAs<CFGStmt>();
    if (!CS)
      continue;
    const Stmt *LastStmt = CS->getStmt();
    assert(LastStmt == Condition || LastStmt == getRightmostLeaf(Condition));
    return LastStmt;
  }
  llvm_unreachable("could not resolve condition");
}

void ExprEngine::processBranch(const Stmt *Condition, const Stmt *Term,
                               NodeBuilderContext& BldCtx,
                               ExplodedNode *Pred,
                               ExplodedNodeSet &Dst,
                               const CFGBlock *DstT,
                               const CFGBlock *DstF) {
  const LocationContext *LCtx = Pred->getLocationContext();
  PrettyStackTraceLocationContext StackCrashInfo(LCtx);
  currBldrCtx = &BldCtx;

  // Check for NULL conditions; e.g. "for(;;)"
  if (!Condition) {
    BranchNodeBuilder NullCondBldr(Pred, Dst, BldCtx, DstT, DstF);
    NullCondBldr.markInfeasible(false);
    NullCondBldr.generateNode(Pred->getState(), true, Pred);
    return;
  }

  if (const Expr *Ex = dyn_cast<Expr>(Condition))
    Condition = Ex->IgnoreParens();

  Condition = ResolveCondition(Condition, BldCtx.getBlock());
  PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(),
                                Condition->getLocStart(),
                                "Error evaluating branch");

  ExplodedNodeSet CheckersOutSet;
  getCheckerManager().runCheckersForBranchCondition(Condition, CheckersOutSet,
                                                    Pred, *this);
  // We generated only sinks.
  if (CheckersOutSet.empty())
    return;

  BranchNodeBuilder builder(CheckersOutSet, Dst, BldCtx, DstT, DstF);
  for (NodeBuilder::iterator I = CheckersOutSet.begin(),
                             E = CheckersOutSet.end(); E != I; ++I) {
    ExplodedNode *PredI = *I;

    if (PredI->isSink())
      continue;

    ProgramStateRef PrevState = PredI->getState();
    SVal X = PrevState->getSVal(Condition, PredI->getLocationContext());

    if (X.isUnknownOrUndef()) {
      // Give it a chance to recover from unknown.
      if (const Expr *Ex = dyn_cast<Expr>(Condition)) {
        if (Ex->getType()->isIntegralOrEnumerationType()) {
          // Try to recover some path-sensitivity. Right now casts of symbolic
          // integers that promote their values are currently not tracked well.
          // If 'Condition' is such an expression, try and recover the
          // underlying value and use that instead.
          SVal recovered = RecoverCastedSymbol(getStateManager(),
                                               PrevState, Condition,
                                               PredI->getLocationContext(),
                                               getContext());

          if (!recovered.isUnknown()) {
            X = recovered;
          }
        }
      }
    }

    // If the condition is still unknown, give up.
    if (X.isUnknownOrUndef()) {
      builder.generateNode(PrevState, true, PredI);
      builder.generateNode(PrevState, false, PredI);
      continue;
    }

    DefinedSVal V = X.castAs<DefinedSVal>();

    ProgramStateRef StTrue, StFalse;
    std::tie(StTrue, StFalse) = PrevState->assume(V);

    // Process the true branch.
    if (builder.isFeasible(true)) {
      if (StTrue)
        builder.generateNode(StTrue, true, PredI);
      else
        builder.markInfeasible(true);
    }

    // Process the false branch.
    if (builder.isFeasible(false)) {
      if (StFalse)
        builder.generateNode(StFalse, false, PredI);
      else
        builder.markInfeasible(false);
    }
  }
  currBldrCtx = nullptr;
}

/// The GDM component containing the set of global variables which have been
/// previously initialized with explicit initializers.
REGISTER_TRAIT_WITH_PROGRAMSTATE(InitializedGlobalsSet,
                                 llvm::ImmutableSet<const VarDecl *>)

void ExprEngine::processStaticInitializer(const DeclStmt *DS,
                                          NodeBuilderContext &BuilderCtx,
                                          ExplodedNode *Pred,
                                          clang::ento::ExplodedNodeSet &Dst,
                                          const CFGBlock *DstT,
                                          const CFGBlock *DstF) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  currBldrCtx = &BuilderCtx;

  const VarDecl *VD = cast<VarDecl>(DS->getSingleDecl());
  ProgramStateRef state = Pred->getState();
  bool initHasRun = state->contains<InitializedGlobalsSet>(VD);
  BranchNodeBuilder builder(Pred, Dst, BuilderCtx, DstT, DstF);

  if (!initHasRun) {
    state = state->add<InitializedGlobalsSet>(VD);
  }

  builder.generateNode(state, initHasRun, Pred);
  builder.markInfeasible(!initHasRun);

  currBldrCtx = nullptr;
}

/// processIndirectGoto - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a computed goto jump.
void ExprEngine::processIndirectGoto(IndirectGotoNodeBuilder &builder) {

  ProgramStateRef state = builder.getState();
  SVal V = state->getSVal(builder.getTarget(), builder.getLocationContext());

  // Three possibilities:
  //
  //   (1) We know the computed label.
  //   (2) The label is NULL (or some other constant), or Undefined.
  //   (3) We have no clue about the label. Dispatch to all targets.
  //

  typedef IndirectGotoNodeBuilder::iterator iterator;

  if (Optional<loc::GotoLabel> LV = V.getAs<loc::GotoLabel>()) {
    const LabelDecl *L = LV->getLabel();

    for (iterator I = builder.begin(), E = builder.end(); I != E; ++I) {
      if (I.getLabel() == L) {
        builder.generateNode(I, state);
        return;
      }
    }

    llvm_unreachable("No block with label.");
  }

  if (V.getAs<loc::ConcreteInt>() || V.getAs<UndefinedVal>()) {
    // Dispatch to the first target and mark it as a sink.
    //ExplodedNode* N = builder.generateNode(builder.begin(), state, true);
    // FIXME: add checker visit.
    //    UndefBranches.insert(N);
    return;
  }

  // This is really a catch-all. We don't support symbolics yet.
  // FIXME: Implement dispatch for symbolic pointers.

  for (iterator I = builder.begin(), E = builder.end(); I != E; ++I)
    builder.generateNode(I, state);
}

/// processEndOfFunction - Called by CoreEngine. Used to generate end-of-path
/// nodes when control reaches the end of a function.
void ExprEngine::processEndOfFunction(NodeBuilderContext& BC,
                                      ExplodedNode *Pred) {
  PrettyStackTraceLocationContext CrashInfo(Pred->getLocationContext());
  StateMgr.EndPath(Pred->getState());

  ExplodedNodeSet Dst;
  if (Pred->getLocationContext()->inTopFrame()) {
    // Remove dead symbols.
    ExplodedNodeSet AfterRemovedDead;
    removeDeadOnEndOfFunction(BC, Pred, AfterRemovedDead);

    // Notify checkers.
    for (ExplodedNodeSet::iterator I = AfterRemovedDead.begin(),
                                   E = AfterRemovedDead.end(); I != E; ++I) {
      getCheckerManager().runCheckersForEndFunction(BC, Dst, *I, *this);
    }
  } else {
    getCheckerManager().runCheckersForEndFunction(BC, Dst, Pred, *this);
  }

  Engine.enqueueEndOfFunction(Dst);
}

/// processSwitch - Called by CoreEngine. Used to generate successor
/// nodes by processing the 'effects' of a switch statement.
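/// Each reachable case value (or value range) is concretized and assumed
/// against the condition; the state left over after assuming every case value
/// false is what feeds the 'default' successor, if it remains feasible.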
void ExprEngine::processSwitch(SwitchNodeBuilder& builder) {
  typedef SwitchNodeBuilder::iterator iterator;
  ProgramStateRef state = builder.getState();
  const Expr *CondE = builder.getCondition();
  SVal CondV_untested = state->getSVal(CondE, builder.getLocationContext());

  if (CondV_untested.isUndef()) {
    //ExplodedNode* N = builder.generateDefaultCaseNode(state, true);
    // FIXME: add checker
    //UndefBranches.insert(N);

    return;
  }
  DefinedOrUnknownSVal CondV = CondV_untested.castAs<DefinedOrUnknownSVal>();

  ProgramStateRef DefaultSt = state;

  iterator I = builder.begin(), EI = builder.end();
  bool defaultIsFeasible = I == EI;

  for ( ; I != EI; ++I) {
    // Successor may be pruned out during CFG construction.
    if (!I.getBlock())
      continue;

    const CaseStmt *Case = I.getCase();

    // Evaluate the LHS of the case value.
    llvm::APSInt V1 = Case->getLHS()->EvaluateKnownConstInt(getContext());
    assert(V1.getBitWidth() == getContext().getTypeSize(CondE->getType()));

    // Get the RHS of the case, if it exists.
    llvm::APSInt V2;
    if (const Expr *E = Case->getRHS())
      V2 = E->EvaluateKnownConstInt(getContext());
    else
      V2 = V1;

    // FIXME: Eventually we should replace the logic below with a range
    //  comparison, rather than concretize the values within the range.
    //  This should be easy once we have "ranges" for NonLVals.

    do {
      nonloc::ConcreteInt CaseVal(getBasicVals().getValue(V1));
      DefinedOrUnknownSVal Res = svalBuilder.evalEQ(DefaultSt ? DefaultSt : state,
                                                    CondV, CaseVal);

      // Now "assume" that the case matches.
      if (ProgramStateRef stateNew = state->assume(Res, true)) {
        builder.generateCaseStmtNode(I, stateNew);

        // If CondV evaluates to a constant, then we know that this
        // is the *only* case that we can take, so stop evaluating the
        // others.
        if (CondV.getAs<nonloc::ConcreteInt>())
          return;
      }

      // Now "assume" that the case doesn't match.  Add this state
      // to the default state (if it is feasible).
      if (DefaultSt) {
        if (ProgramStateRef stateNew = DefaultSt->assume(Res, false)) {
          defaultIsFeasible = true;
          DefaultSt = stateNew;
        }
        else {
          defaultIsFeasible = false;
          DefaultSt = nullptr;
        }
      }

      // Concretize the next value in the range.
      if (V1 == V2)
        break;

      ++V1;
      assert(V1 <= V2);

    } while (true);
  }

  if (!defaultIsFeasible)
    return;

  // If we have switch(enum value), the default branch is not
  // feasible if all of the enum constants not covered by 'case:' statements
  // are not feasible values for the switch condition.
  //
  // Note that this isn't as accurate as it could be.  Even if there isn't
  // a case for a particular enum value, as long as that value isn't feasible
  // it shouldn't count toward making 'default:' reachable.
  const SwitchStmt *SS = builder.getSwitch();
  const Expr *CondExpr = SS->getCond()->IgnoreParenImpCasts();
  if (CondExpr->getType()->getAs<EnumType>()) {
    if (SS->isAllEnumCasesCovered())
      return;
  }

  builder.generateDefaultCaseNode(DefaultSt);
}

//===----------------------------------------------------------------------===//
// Transfer functions: Loads and stores.
//===----------------------------------------------------------------------===//

void ExprEngine::VisitCommonDeclRefExpr(const Expr *Ex, const NamedDecl *D,
                                        ExplodedNode *Pred,
                                        ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  ProgramStateRef state = Pred->getState();
  const LocationContext *LCtx = Pred->getLocationContext();

  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    // C permits "extern void v", and if you cast the address to a valid type,
    // you can even do things with it.  We simply pretend
    assert(Ex->isGLValue() || VD->getType()->isVoidType());
    SVal V = state->getLValue(VD, Pred->getLocationContext());

    // For references, the 'lvalue' is the pointer address stored in the
    // reference region.
    if (VD->getType()->isReferenceType()) {
      if (const MemRegion *R = V.getAsRegion())
        V = state->getSVal(R);
      else
        V = UnknownVal();
    }

    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (const EnumConstantDecl *ED = dyn_cast<EnumConstantDecl>(D)) {
    assert(!Ex->isGLValue());
    SVal V = svalBuilder.makeIntVal(ED->getInitVal());
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V));
    return;
  }
  if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
    SVal V = svalBuilder.getFunctionPointer(FD);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }
  if (isa<FieldDecl>(D)) {
    // FIXME: Compute lvalue of field pointers-to-member.
    // Right now we just use a non-null void pointer, so that it gives proper
    // results in boolean contexts.
    SVal V = svalBuilder.conjureSymbolVal(Ex, LCtx, getContext().VoidPtrTy,
                                          currBldrCtx->blockCount());
    state = state->assume(V.castAs<DefinedOrUnknownSVal>(), true);
    Bldr.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
    return;
  }

  llvm_unreachable("Support for this Decl not implemented.");
}

/// VisitLvalArraySubscriptExpr - Transfer function for array accesses.
void ExprEngine::VisitLvalArraySubscriptExpr(const ArraySubscriptExpr *A,
                                             ExplodedNode *Pred,
                                             ExplodedNodeSet &Dst){

  const Expr *Base = A->getBase()->IgnoreParens();
  const Expr *Idx = A->getIdx()->IgnoreParens();

  ExplodedNodeSet checkerPreStmt;
  getCheckerManager().runCheckersForPreStmt(checkerPreStmt, Pred, A, *this);

  StmtNodeBuilder Bldr(checkerPreStmt, Dst, *currBldrCtx);

  for (ExplodedNodeSet::iterator it = checkerPreStmt.begin(),
                                 ei = checkerPreStmt.end(); it != ei; ++it) {
    const LocationContext *LCtx = (*it)->getLocationContext();
    ProgramStateRef state = (*it)->getState();
    SVal V = state->getLValue(A->getType(),
                              state->getSVal(Idx, LCtx),
                              state->getSVal(Base, LCtx));
    assert(A->isGLValue());
    Bldr.generateNode(A, *it, state->BindExpr(A, LCtx, V), nullptr,
                      ProgramPoint::PostLValueKind);
  }
}

/// VisitMemberExpr - Transfer function for member expressions.
void ExprEngine::VisitMemberExpr(const MemberExpr *M, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {

  // FIXME: Prechecks eventually go in ::Visit().
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, M, *this);

  ExplodedNodeSet EvalSet;
  ValueDecl *Member = M->getMemberDecl();

  // Handle static member variables and enum constants accessed via
  // member syntax.
  if (isa<VarDecl>(Member) || isa<EnumConstantDecl>(Member)) {
    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I) {
      VisitCommonDeclRefExpr(M, Member, *I, EvalSet);
    }
  } else {
    StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx);
    ExplodedNodeSet Tmp;

    for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
         I != E; ++I) {
      ProgramStateRef state = (*I)->getState();
      const LocationContext *LCtx = (*I)->getLocationContext();
      Expr *BaseExpr = M->getBase();

      // Handle C++ method calls.
      if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(Member)) {
        if (MD->isInstance())
          state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);

        SVal MDVal = svalBuilder.getFunctionPointer(MD);
        state = state->BindExpr(M, LCtx, MDVal);

        Bldr.generateNode(M, *I, state);
        continue;
      }

      // Handle regular struct fields / member variables.
      state = createTemporaryRegionIfNeeded(state, LCtx, BaseExpr);
      SVal baseExprVal = state->getSVal(BaseExpr, LCtx);

      FieldDecl *field = cast<FieldDecl>(Member);
      SVal L = state->getLValue(field, baseExprVal);

      if (M->isGLValue() || M->getType()->isArrayType()) {
        // We special-case rvalues of array type because the analyzer cannot
        // reason about them, since we expect all regions to be wrapped in Locs.
        // We instead treat these as lvalues and assume that they will decay to
        // pointers as soon as they are used.
        if (!M->isGLValue()) {
          assert(M->getType()->isArrayType());
          const ImplicitCastExpr *PE =
            dyn_cast<ImplicitCastExpr>((*I)->getParentMap().getParent(M));
          if (!PE || PE->getCastKind() != CK_ArrayToPointerDecay) {
            llvm_unreachable("should always be wrapped in ArrayToPointerDecay");
          }
        }

        if (field->getType()->isReferenceType()) {
          if (const MemRegion *R = L.getAsRegion())
            L = state->getSVal(R);
          else
            L = UnknownVal();
        }

        Bldr.generateNode(M, *I, state->BindExpr(M, LCtx, L), nullptr,
                          ProgramPoint::PostLValueKind);
      } else {
        Bldr.takeNodes(*I);
        evalLoad(Tmp, M, M, *I, state, L);
        Bldr.addNodes(Tmp);
      }
    }
  }

  getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, M, *this);
}

namespace {
class CollectReachableSymbolsCallback : public SymbolVisitor {
  InvalidatedSymbols Symbols;
public:
  CollectReachableSymbolsCallback(ProgramStateRef State) {}
  const InvalidatedSymbols &getSymbols() const { return Symbols; }

  bool VisitSymbol(SymbolRef Sym) override {
    Symbols.insert(Sym);
    return true;
  }
};
} // end anonymous namespace

// A value escapes in three possible cases:
// (1) We are binding to something that is not a memory region.
// (2) We are binding to a MemRegion that does not have stack storage.
// (3) We are binding to a MemRegion with stack storage that the store
//     does not understand.
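// For instance (an illustrative example, not exhaustive): a bind such as
// 'globalPtr = &local;' stores to a region without stack storage, so case (2)
// applies and every symbol reachable from the bound value is reported to the
// checkers as escaped via PSK_EscapeOnBind.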
ProgramStateRef ExprEngine::processPointerEscapedOnBind(ProgramStateRef State,
                                                        SVal Loc, SVal Val) {
  // Are we storing to something that causes the value to "escape"?
  bool escapes = true;

  // TODO: Move to StoreManager.
  if (Optional<loc::MemRegionVal> regionLoc = Loc.getAs<loc::MemRegionVal>()) {
    escapes = !regionLoc->getRegion()->hasStackStorage();

    if (!escapes) {
      // To test (3), generate a new state with the binding added.  If it is
      // the same state, then it escapes (since the store cannot represent
      // the binding).
      // Do this only if we know that the store is not supposed to generate the
      // same state.
      SVal StoredVal = State->getSVal(regionLoc->getRegion());
      if (StoredVal != Val)
        escapes = (State == (State->bindLoc(*regionLoc, Val)));
    }
  }

  // If our store can represent the binding and we are storing to something
  // with local storage, just return and let the simulation state continue
  // as is.
  if (!escapes)
    return State;

  // Otherwise, find all symbols referenced by 'Val' that we are tracking
  // and stop tracking them.
  CollectReachableSymbolsCallback Scanner =
      State->scanReachableSymbols<CollectReachableSymbolsCallback>(Val);
  const InvalidatedSymbols &EscapedSymbols = Scanner.getSymbols();
  State = getCheckerManager().runCheckersForPointerEscape(State,
                                                          EscapedSymbols,
                                                          /*CallEvent*/ nullptr,
                                                          PSK_EscapeOnBind,
                                                          nullptr);

  return State;
}

ProgramStateRef
ExprEngine::notifyCheckersOfPointerEscape(ProgramStateRef State,
    const InvalidatedSymbols *Invalidated,
    ArrayRef<const MemRegion *> ExplicitRegions,
    ArrayRef<const MemRegion *> Regions,
    const CallEvent *Call,
    RegionAndSymbolInvalidationTraits &ITraits) {

  if (!Invalidated || Invalidated->empty())
    return State;

  if (!Call)
    return getCheckerManager().runCheckersForPointerEscape(State,
                                                           *Invalidated,
                                                           nullptr,
                                                           PSK_EscapeOther,
                                                           &ITraits);

  // If the symbols were invalidated by a call, we want to find out which ones
  // were invalidated directly due to being arguments to the call.
  InvalidatedSymbols SymbolsDirectlyInvalidated;
  for (ArrayRef<const MemRegion *>::iterator I = ExplicitRegions.begin(),
       E = ExplicitRegions.end(); I != E; ++I) {
    if (const SymbolicRegion *R = (*I)->StripCasts()->getAs<SymbolicRegion>())
      SymbolsDirectlyInvalidated.insert(R->getSymbol());
  }

  InvalidatedSymbols SymbolsIndirectlyInvalidated;
  for (InvalidatedSymbols::const_iterator I = Invalidated->begin(),
       E = Invalidated->end(); I != E; ++I) {
    SymbolRef sym = *I;
    if (SymbolsDirectlyInvalidated.count(sym))
      continue;
    SymbolsIndirectlyInvalidated.insert(sym);
  }

  if (!SymbolsDirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsDirectlyInvalidated, Call, PSK_DirectEscapeOnCall, &ITraits);

  // Notify about the symbols that get indirectly invalidated by the call.
  if (!SymbolsIndirectlyInvalidated.empty())
    State = getCheckerManager().runCheckersForPointerEscape(State,
        SymbolsIndirectlyInvalidated, Call, PSK_IndirectEscapeOnCall, &ITraits);

  return State;
}

/// evalBind - Handle the semantics of binding a value to a specific location.
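/// For a simple assignment such as '*p = v;' (an illustrative example),
/// 'location' is the lvalue computed for '*p' and 'Val' is the rvalue of 'v'.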
/// This method is used by evalStore and (soon) VisitDeclStmt, and others.
void ExprEngine::evalBind(ExplodedNodeSet &Dst, const Stmt *StoreE,
                          ExplodedNode *Pred,
                          SVal location, SVal Val,
                          bool atDeclInit, const ProgramPoint *PP) {

  const LocationContext *LC = Pred->getLocationContext();
  PostStmt PS(StoreE, LC);
  if (!PP)
    PP = &PS;

  // Do a previsit of the bind.
  ExplodedNodeSet CheckedSet;
  getCheckerManager().runCheckersForBind(CheckedSet, Pred, location, Val,
                                         StoreE, *this, *PP);

  StmtNodeBuilder Bldr(CheckedSet, Dst, *currBldrCtx);

  // If the location is not a 'Loc', it will already be handled by
  // the checkers.  There is nothing left to do.
  if (!location.getAs<Loc>()) {
    const ProgramPoint L = PostStore(StoreE, LC, /*Loc*/nullptr,
                                     /*tag*/nullptr);
    ProgramStateRef state = Pred->getState();
    state = processPointerEscapedOnBind(state, location, Val);
    Bldr.generateNode(L, state, Pred);
    return;
  }

  for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end();
       I != E; ++I) {
    ExplodedNode *PredI = *I;
    ProgramStateRef state = PredI->getState();

    state = processPointerEscapedOnBind(state, location, Val);

    // When binding the value, pass on the hint that this is an initialization.
    // For initializations, we do not need to inform clients of region
    // changes.
    state = state->bindLoc(location.castAs<Loc>(),
                           Val, /* notifyChanges = */ !atDeclInit);

    const MemRegion *LocReg = nullptr;
    if (Optional<loc::MemRegionVal> LocRegVal =
            location.getAs<loc::MemRegionVal>()) {
      LocReg = LocRegVal->getRegion();
    }

    const ProgramPoint L = PostStore(StoreE, LC, LocReg, nullptr);
    Bldr.generateNode(L, state, PredI);
  }
}

/// evalStore - Handle the semantics of a store via an assignment.
///  @param Dst The node set to store generated state nodes
///  @param AssignE The assignment expression if the store happens in an
///         assignment.
///  @param LocationE The location expression that is stored to.
///  @param state The current simulation state
///  @param location The location to store the value
///  @param Val The value to be stored
void ExprEngine::evalStore(ExplodedNodeSet &Dst, const Expr *AssignE,
                           const Expr *LocationE,
                           ExplodedNode *Pred,
                           ProgramStateRef state, SVal location, SVal Val,
                           const ProgramPointTag *tag) {
  // Proceed with the store.  We use AssignE as the anchor for the PostStore
  // ProgramPoint if it is non-NULL, and LocationE otherwise.
  const Expr *StoreE = AssignE ? AssignE : LocationE;

  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, AssignE, LocationE, Pred, state, location, tag, false);

  if (Tmp.empty())
    return;

  if (location.isUndef())
    return;

  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI)
    evalBind(Dst, StoreE, *NI, location, Val, false);
}

void ExprEngine::evalLoad(ExplodedNodeSet &Dst,
                          const Expr *NodeEx,
                          const Expr *BoundEx,
                          ExplodedNode *Pred,
                          ProgramStateRef state,
                          SVal location,
                          const ProgramPointTag *tag,
                          QualType LoadTy)
{
  assert(!location.getAs<NonLoc>() && "location cannot be a NonLoc.");

  // Are we loading from a region?  This actually results in two loads; one
  // to fetch the address of the referenced value and one to fetch the
  // referenced value.
  if (const TypedValueRegion *TR =
        dyn_cast_or_null<TypedValueRegion>(location.getAsRegion())) {

    QualType ValTy = TR->getValueType();
    if (const ReferenceType *RT = ValTy->getAs<ReferenceType>()) {
      static SimpleProgramPointTag
             loadReferenceTag(TagProviderName, "Load Reference");
      ExplodedNodeSet Tmp;
      evalLoadCommon(Tmp, NodeEx, BoundEx, Pred, state,
                     location, &loadReferenceTag,
                     getContext().getPointerType(RT->getPointeeType()));

      // Perform the load from the referenced value.
      for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) {
        state = (*I)->getState();
        location = state->getSVal(BoundEx, (*I)->getLocationContext());
        evalLoadCommon(Dst, NodeEx, BoundEx, *I, state, location, tag, LoadTy);
      }
      return;
    }
  }

  evalLoadCommon(Dst, NodeEx, BoundEx, Pred, state, location, tag, LoadTy);
}

void ExprEngine::evalLoadCommon(ExplodedNodeSet &Dst,
                                const Expr *NodeEx,
                                const Expr *BoundEx,
                                ExplodedNode *Pred,
                                ProgramStateRef state,
                                SVal location,
                                const ProgramPointTag *tag,
                                QualType LoadTy) {
  assert(NodeEx);
  assert(BoundEx);
  // Evaluate the location (checks for bad dereferences).
  ExplodedNodeSet Tmp;
  evalLocation(Tmp, NodeEx, BoundEx, Pred, state, location, tag, true);
  if (Tmp.empty())
    return;

  StmtNodeBuilder Bldr(Tmp, Dst, *currBldrCtx);
  if (location.isUndef())
    return;

  // Proceed with the load.
  for (ExplodedNodeSet::iterator NI = Tmp.begin(), NE = Tmp.end(); NI != NE; ++NI) {
    state = (*NI)->getState();
    const LocationContext *LCtx = (*NI)->getLocationContext();

    SVal V = UnknownVal();
    if (location.isValid()) {
      if (LoadTy.isNull())
        LoadTy = BoundEx->getType();
      V = state->getSVal(location.castAs<Loc>(), LoadTy);
    }

    Bldr.generateNode(NodeEx, *NI, state->BindExpr(BoundEx, LCtx, V), tag,
                      ProgramPoint::PostLoadKind);
  }
}

void ExprEngine::evalLocation(ExplodedNodeSet &Dst,
                              const Stmt *NodeEx,
                              const Stmt *BoundEx,
                              ExplodedNode *Pred,
                              ProgramStateRef state,
                              SVal location,
                              const ProgramPointTag *tag,
                              bool isLoad) {
  StmtNodeBuilder BldrTop(Pred, Dst, *currBldrCtx);
  // Early checks for performance reasons.
  if (location.isUnknown()) {
    return;
  }

  ExplodedNodeSet Src;
  BldrTop.takeNodes(Pred);
  StmtNodeBuilder Bldr(Pred, Src, *currBldrCtx);
  if (Pred->getState() != state) {
    // Associate this new state with an ExplodedNode.
    // FIXME: If I pass null tag, the graph is incorrect, e.g. for
    //   int *p;
    //   p = 0;
    //   *p = 0xDEADBEEF;
    // "p = 0" is not noted as "Null pointer value stored to 'p'" but
    // instead "int *p" is noted as
    // "Variable 'p' initialized to a null pointer value"

    static SimpleProgramPointTag tag(TagProviderName, "Location");
    Bldr.generateNode(NodeEx, Pred, state, &tag);
  }
  ExplodedNodeSet Tmp;
  getCheckerManager().runCheckersForLocation(Tmp, Src, location, isLoad,
                                             NodeEx, BoundEx, *this);
  BldrTop.addNodes(Tmp);
}

std::pair<const ProgramPointTag *, const ProgramPointTag*>
ExprEngine::geteagerlyAssumeBinOpBifurcationTags() {
  static SimpleProgramPointTag
         eagerlyAssumeBinOpBifurcationTrue(TagProviderName,
                                           "Eagerly Assume True"),
         eagerlyAssumeBinOpBifurcationFalse(TagProviderName,
                                            "Eagerly Assume False");
  return std::make_pair(&eagerlyAssumeBinOpBifurcationTrue,
                        &eagerlyAssumeBinOpBifurcationFalse);
}

void ExprEngine::evalEagerlyAssumeBinOpBifurcation(ExplodedNodeSet &Dst,
                                                   ExplodedNodeSet &Src,
                                                   const Expr *Ex) {
  StmtNodeBuilder Bldr(Src, Dst, *currBldrCtx);

  for (ExplodedNodeSet::iterator I = Src.begin(), E = Src.end(); I != E; ++I) {
    ExplodedNode *Pred = *I;
    // Test if the previous node was at the same expression.  This can happen
    // when the expression fails to evaluate to anything meaningful and
    // (as an optimization) we don't generate a node.
    ProgramPoint P = Pred->getLocation();
    if (!P.getAs<PostStmt>() || P.castAs<PostStmt>().getStmt() != Ex) {
      continue;
    }

    ProgramStateRef state = Pred->getState();
    SVal V = state->getSVal(Ex, Pred->getLocationContext());
    Optional<nonloc::SymbolVal> SEV = V.getAs<nonloc::SymbolVal>();
    if (SEV && SEV->isExpression()) {
      const std::pair<const ProgramPointTag *, const ProgramPointTag*> &tags =
        geteagerlyAssumeBinOpBifurcationTags();

      ProgramStateRef StateTrue, StateFalse;
      std::tie(StateTrue, StateFalse) = state->assume(*SEV);

      // First assume that the condition is true.
      if (StateTrue) {
        SVal Val = svalBuilder.makeIntVal(1U, Ex->getType());
        StateTrue = StateTrue->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateTrue, tags.first);
      }

      // Next, assume that the condition is false.
      if (StateFalse) {
        SVal Val = svalBuilder.makeIntVal(0U, Ex->getType());
        StateFalse = StateFalse->BindExpr(Ex, Pred->getLocationContext(), Val);
        Bldr.generateNode(Ex, Pred, StateFalse, tags.second);
      }
    }
  }
}

void ExprEngine::VisitGCCAsmStmt(const GCCAsmStmt *A, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  // We have processed both the inputs and the outputs.  All of the outputs
  // should evaluate to Locs.  Nuke all of their values.

  // FIXME: Some day in the future it would be nice to allow a "plug-in"
  // which interprets the inline asm and stores proper results in the
  // outputs.

  ProgramStateRef state = Pred->getState();

  for (const Expr *O : A->outputs()) {
    SVal X = state->getSVal(O, Pred->getLocationContext());
    assert(!X.getAs<NonLoc>());  // Should be an Lval, or unknown, undef.

    if (Optional<Loc> LV = X.getAs<Loc>())
      state = state->bindLoc(*LV, UnknownVal());
  }

  Bldr.generateNode(A, Pred, state);
}

void ExprEngine::VisitMSAsmStmt(const MSAsmStmt *A, ExplodedNode *Pred,
                                ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(A, Pred, Pred->getState());
}

//===----------------------------------------------------------------------===//
// Visualization.
//===----------------------------------------------------------------------===//

#ifndef NDEBUG
static ExprEngine* GraphPrintCheckerState;
static SourceManager* GraphPrintSourceManager;

namespace llvm {
template<>
struct DOTGraphTraits<ExplodedNode*> :
  public DefaultDOTGraphTraits {

  DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}

  // FIXME: Since we do not cache error nodes in ExprEngine now, this does not
  // work.
  static std::string getNodeAttributes(const ExplodedNode *N, void*) {

#if 0
    // FIXME: Replace with a general scheme to tell if the node is
    // an error node.
    if (GraphPrintCheckerState->isImplicitNullDeref(N) ||
        GraphPrintCheckerState->isExplicitNullDeref(N) ||
        GraphPrintCheckerState->isUndefDeref(N) ||
        GraphPrintCheckerState->isUndefStore(N) ||
        GraphPrintCheckerState->isUndefControlFlow(N) ||
        GraphPrintCheckerState->isUndefResult(N) ||
        GraphPrintCheckerState->isBadCall(N) ||
        GraphPrintCheckerState->isUndefArg(N))
      return "color=\"red\",style=\"filled\"";

    if (GraphPrintCheckerState->isNoReturnCall(N))
      return "color=\"blue\",style=\"filled\"";
#endif
    return "";
  }

  static void printLocation(raw_ostream &Out, SourceLocation SLoc) {
    if (SLoc.isFileID()) {
      Out << "\\lline="
          << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
          << " col="
          << GraphPrintSourceManager->getExpansionColumnNumber(SLoc)
          << "\\l";
    }
  }

  static std::string getNodeLabel(const ExplodedNode *N, void*){

    std::string sbuf;
    llvm::raw_string_ostream Out(sbuf);

    // Program Location.
    ProgramPoint Loc = N->getLocation();

    switch (Loc.getKind()) {
      case ProgramPoint::BlockEntranceKind: {
        Out << "Block Entrance: B"
            << Loc.castAs<BlockEntrance>().getBlock()->getBlockID();
        if (const NamedDecl *ND =
                dyn_cast<NamedDecl>(Loc.getLocationContext()->getDecl())) {
          Out << " (";
          ND->printName(Out);
          Out << ")";
        }
        break;
      }

      case ProgramPoint::BlockExitKind:
        assert(false);
        break;

      case ProgramPoint::CallEnterKind:
        Out << "CallEnter";
        break;

      case ProgramPoint::CallExitBeginKind:
        Out << "CallExitBegin";
        break;

      case ProgramPoint::CallExitEndKind:
        Out << "CallExitEnd";
        break;

      case ProgramPoint::PostStmtPurgeDeadSymbolsKind:
        Out << "PostStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::PreStmtPurgeDeadSymbolsKind:
        Out << "PreStmtPurgeDeadSymbols";
        break;

      case ProgramPoint::EpsilonKind:
        Out << "Epsilon Point";
        break;

      case ProgramPoint::PreImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PreCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostImplicitCallKind: {
        ImplicitCallPoint PC = Loc.castAs<ImplicitCallPoint>();
        Out << "PostCall: ";

        // FIXME: Get proper printing options.
        PC.getDecl()->print(Out, LangOptions());
        printLocation(Out, PC.getLocation());
        break;
      }

      case ProgramPoint::PostInitializerKind: {
        Out << "PostInitializer: ";
        const CXXCtorInitializer *Init =
          Loc.castAs<PostInitializer>().getInitializer();
        if (const FieldDecl *FD = Init->getAnyMember())
          Out << *FD;
        else {
          QualType Ty = Init->getTypeSourceInfo()->getType();
          Ty = Ty.getLocalUnqualifiedType();
          LangOptions LO; // FIXME.
          Ty.print(Out, LO);
        }
        break;
      }

      case ProgramPoint::BlockEdgeKind: {
        const BlockEdge &E = Loc.castAs<BlockEdge>();
        Out << "Edge: (B" << E.getSrc()->getBlockID() << ", B"
            << E.getDst()->getBlockID() << ')';

        if (const Stmt *T = E.getSrc()->getTerminator()) {
          SourceLocation SLoc = T->getLocStart();

          Out << "\\|Terminator: ";
          LangOptions LO; // FIXME.
          E.getSrc()->printTerminator(Out, LO);

          if (SLoc.isFileID()) {
            Out << "\\lline="
                << GraphPrintSourceManager->getExpansionLineNumber(SLoc)
                << " col="
                << GraphPrintSourceManager->getExpansionColumnNumber(SLoc);
          }

          if (isa<SwitchStmt>(T)) {
            const Stmt *Label = E.getDst()->getLabel();

            if (Label) {
              if (const CaseStmt *C = dyn_cast<CaseStmt>(Label)) {
                Out << "\\lcase ";
                LangOptions LO; // FIXME.
                if (C->getLHS())
                  C->getLHS()->printPretty(Out, nullptr, PrintingPolicy(LO));

                if (const Stmt *RHS = C->getRHS()) {
                  Out << " .. ";
                  RHS->printPretty(Out, nullptr, PrintingPolicy(LO));
                }

                Out << ":";
              }
              else {
                assert(isa<DefaultStmt>(Label));
                Out << "\\ldefault:";
              }
            }
            else
              Out << "\\l(implicit) default:";
          }
          else if (isa<IndirectGotoStmt>(T)) {
            // FIXME
          }
          else {
            Out << "\\lCondition: ";
            if (*E.getSrc()->succ_begin() == E.getDst())
              Out << "true";
            else
              Out << "false";
          }

          Out << "\\l";
        }

#if 0
        // FIXME: Replace with a general scheme to determine
        // the name of the check.
        if (GraphPrintCheckerState->isUndefControlFlow(N)) {
          Out << "\\|Control-flow based on\\lUndefined value.\\l";
        }
#endif
        break;
      }

      default: {
        const Stmt *S = Loc.castAs<StmtPoint>().getStmt();
        assert(S != nullptr && "Expecting non-null Stmt");

        Out << S->getStmtClassName() << ' ' << (const void*) S << ' ';
        LangOptions LO; // FIXME.
        S->printPretty(Out, nullptr, PrintingPolicy(LO));
        printLocation(Out, S->getLocStart());

        if (Loc.getAs<PreStmt>())
          Out << "\\lPreStmt\\l;";
        else if (Loc.getAs<PostLoad>())
          Out << "\\lPostLoad\\l;";
        else if (Loc.getAs<PostStore>())
          Out << "\\lPostStore\\l";
        else if (Loc.getAs<PostLValue>())
          Out << "\\lPostLValue\\l";

#if 0
        // FIXME: Replace with a general scheme to determine
        // the name of the check.
        if (GraphPrintCheckerState->isImplicitNullDeref(N))
          Out << "\\|Implicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isExplicitNullDeref(N))
          Out << "\\|Explicit-Null Dereference.\\l";
        else if (GraphPrintCheckerState->isUndefDeref(N))
          Out << "\\|Dereference of undefined value.\\l";
        else if (GraphPrintCheckerState->isUndefStore(N))
          Out << "\\|Store to Undefined Loc.";
        else if (GraphPrintCheckerState->isUndefResult(N))
          Out << "\\|Result of operation is undefined.";
        else if (GraphPrintCheckerState->isNoReturnCall(N))
          Out << "\\|Call to function marked \"noreturn\".";
        else if (GraphPrintCheckerState->isBadCall(N))
          Out << "\\|Call to NULL/Undefined.";
        else if (GraphPrintCheckerState->isUndefArg(N))
          Out << "\\|Argument in call is undefined";
#endif

        break;
      }
    }

    ProgramStateRef state = N->getState();
    Out << "\\|StateID: " << (const void*) state.get()
        << " NodeID: " << (const void*) N << "\\|";
    state->printDOT(Out);

    Out << "\\l";

    if (const ProgramPointTag *tag = Loc.getTag()) {
      Out << "\\|Tag: " << tag->getTagDescription();
      Out << "\\l";
    }
    return Out.str();
  }
};
} // end llvm namespace
#endif

#ifndef NDEBUG
template <typename ITERATOR>
ExplodedNode *GetGraphNode(ITERATOR I) { return *I; }

template <> ExplodedNode*
GetGraphNode<llvm::DenseMap<ExplodedNode*, Expr*>::iterator>
  (llvm::DenseMap<ExplodedNode*, Expr*>::iterator I) {
  return I->first;
}
#endif

void ExprEngine::ViewGraph(bool trim) {
#ifndef NDEBUG
  if (trim) {
    std::vector<const ExplodedNode*> Src;

    // Flush any outstanding reports to make sure we cover all the nodes.
    // This does not cause them to get displayed.
    for (BugReporter::iterator I = BR.begin(), E = BR.end(); I != E; ++I)
      const_cast<BugType*>(*I)->FlushReports(BR);

    // Iterate through the reports and get their nodes.
    for (BugReporter::EQClasses_iterator
           EI = BR.EQClasses_begin(), EE = BR.EQClasses_end(); EI != EE; ++EI) {
      ExplodedNode *N = const_cast<ExplodedNode*>(EI->begin()->getErrorNode());
      if (N) Src.push_back(N);
    }

    ViewGraph(Src);
  }
  else {
    GraphPrintCheckerState = this;
    GraphPrintSourceManager = &getContext().getSourceManager();

    llvm::ViewGraph(*G.roots_begin(), "ExprEngine");

    GraphPrintCheckerState = nullptr;
    GraphPrintSourceManager = nullptr;
  }
#endif
}

void ExprEngine::ViewGraph(ArrayRef<const ExplodedNode*> Nodes) {
#ifndef NDEBUG
  GraphPrintCheckerState = this;
  GraphPrintSourceManager = &getContext().getSourceManager();

  std::unique_ptr<ExplodedGraph> TrimmedG(G.trim(Nodes));

  if (!TrimmedG.get())
    llvm::errs() << "warning: Trimmed ExplodedGraph is empty.\n";
  else
    llvm::ViewGraph(*TrimmedG->roots_begin(), "TrimmedExprEngine");

  GraphPrintCheckerState = nullptr;
  GraphPrintSourceManager = nullptr;
#endif
}