1 //===- ExprEngineCXX.cpp - ExprEngine support for C++ -----------*- C++ -*-===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file defines the C++ expression evaluation engine. 10 // 11 //===----------------------------------------------------------------------===// 12 13 #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" 14 #include "clang/Analysis/ConstructionContext.h" 15 #include "clang/AST/DeclCXX.h" 16 #include "clang/AST/StmtCXX.h" 17 #include "clang/AST/ParentMap.h" 18 #include "clang/Basic/PrettyStackTrace.h" 19 #include "clang/StaticAnalyzer/Core/CheckerManager.h" 20 #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 21 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" 22 23 using namespace clang; 24 using namespace ento; 25 26 void ExprEngine::CreateCXXTemporaryObject(const MaterializeTemporaryExpr *ME, 27 ExplodedNode *Pred, 28 ExplodedNodeSet &Dst) { 29 StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); 30 const Expr *tempExpr = ME->getSubExpr()->IgnoreParens(); 31 ProgramStateRef state = Pred->getState(); 32 const LocationContext *LCtx = Pred->getLocationContext(); 33 34 state = createTemporaryRegionIfNeeded(state, LCtx, tempExpr, ME); 35 Bldr.generateNode(ME, Pred, state); 36 } 37 38 // FIXME: This is the sort of code that should eventually live in a Core 39 // checker rather than as a special case in ExprEngine. 
/// Model a call to a trivial copy/move constructor or a trivial copy/move
/// assignment operator by directly copying the value of the source argument
/// into the destination object ('this') region, rather than evaluating the
/// call itself. \p Bldr is the node builder whose frontier is updated with
/// the resulting nodes; \p Pred is the predecessor node; \p Call is the
/// constructor or operator= call being modeled.
void ExprEngine::performTrivialCopy(NodeBuilder &Bldr, ExplodedNode *Pred,
                                    const CallEvent &Call) {
  SVal ThisVal;
  bool AlwaysReturnsLValue;
  const CXXRecordDecl *ThisRD = nullptr;
  // Distinguish the two supported flavors: a trivial copy/move constructor
  // (returns a prvalue) vs. a trivial operator= (returns *this, an lvalue).
  if (const CXXConstructorCall *Ctor = dyn_cast<CXXConstructorCall>(&Call)) {
    assert(Ctor->getDecl()->isTrivial());
    assert(Ctor->getDecl()->isCopyOrMoveConstructor());
    ThisVal = Ctor->getCXXThisVal();
    ThisRD = Ctor->getDecl()->getParent();
    AlwaysReturnsLValue = false;
  } else {
    assert(cast<CXXMethodDecl>(Call.getDecl())->isTrivial());
    assert(cast<CXXMethodDecl>(Call.getDecl())->getOverloadedOperator() ==
           OO_Equal);
    ThisVal = cast<CXXInstanceCall>(Call).getCXXThisVal();
    ThisRD = cast<CXXMethodDecl>(Call.getDecl())->getParent();
    AlwaysReturnsLValue = true;
  }

  assert(ThisRD);
  if (ThisRD->isEmpty()) {
    // Do nothing for empty classes. Otherwise it'd retrieve an UnknownVal
    // and bind it and RegionStore would think that the actual value
    // in this region at this offset is unknown.
    return;
  }

  const LocationContext *LCtx = Pred->getLocationContext();

  ExplodedNodeSet Dst;
  Bldr.takeNodes(Pred);

  // The source object is the sole argument of the copy/move operation.
  SVal V = Call.getArgSVal(0);

  // If the value being copied is not unknown, load from its location to get
  // an aggregate rvalue.
  if (Optional<Loc> L = V.getAs<Loc>())
    V = Pred->getState()->getSVal(*L);
  else
    assert(V.isUnknownOrUndef());

  const Expr *CallExpr = Call.getOriginExpr();
  // Bind the loaded value into the destination region. The 'true' argument
  // is forwarded to evalBind along with the origin expression.
  evalBind(Dst, CallExpr, Pred, ThisVal, V, true);

  // For each node produced by the bind, also bind the call expression's
  // result value: the 'this' lvalue for operator=, or the return value
  // computed by bindReturnValue for a constructor.
  PostStmt PS(CallExpr, LCtx);
  for (ExplodedNodeSet::iterator I = Dst.begin(), E = Dst.end();
       I != E; ++I) {
    ProgramStateRef State = (*I)->getState();
    if (AlwaysReturnsLValue)
      State = State->BindExpr(CallExpr, LCtx, ThisVal);
    else
      State = bindReturnValue(Call, LCtx, State);
    Bldr.generateNode(PS, State, *I);
  }
}


/// Strip away all array types from \p Ty, descending into element [0] at
/// each level. Returns the lvalue of the innermost first element, updates
/// \p Ty in place to the innermost element type, and sets \p IsArray to
/// true if at least one array level was stripped (left untouched otherwise).
SVal ExprEngine::makeZeroElementRegion(ProgramStateRef State, SVal LValue,
                                       QualType &Ty, bool &IsArray) {
  SValBuilder &SVB = State->getStateManager().getSValBuilder();
  ASTContext &Ctx = SVB.getContext();

  while (const ArrayType *AT = Ctx.getAsArrayType(Ty)) {
    Ty = AT->getElementType();
    LValue = State->getLValue(Ty, SVB.makeZeroArrayIndex(), LValue);
    IsArray = true;
  }

  return LValue;
}

/// Compute the target region (and the updated program state) into which the
/// construction described by construction context \p CC should occur for
/// expression \p E in \p LCtx. On success the object is registered as
/// "under construction" in the returned state. When the target cannot be
/// modeled precisely, flags in \p CallOpts are set to notify the caller.
std::pair<ProgramStateRef, SVal> ExprEngine::handleConstructionContext(
    const Expr *E, ProgramStateRef State, const LocationContext *LCtx,
    const ConstructionContext *CC, EvalCallOptions &CallOpts) {
  SValBuilder &SVB = getSValBuilder();
  MemRegionManager &MRMgr = SVB.getRegionManager();
  ASTContext &ACtx = SVB.getContext();

  // See if we're constructing an existing region by looking at the
  // current construction context.
  if (CC) {
    switch (CC->getKind()) {
    // Construction into a local variable: the target is the variable's
    // region (or its first element, for arrays).
    case ConstructionContext::CXX17ElidedCopyVariableKind:
    case ConstructionContext::SimpleVariableKind: {
      const auto *DSCC = cast<VariableConstructionContext>(CC);
      const auto *DS = DSCC->getDeclStmt();
      const auto *Var = cast<VarDecl>(DS->getSingleDecl());
      SVal LValue = State->getLValue(Var, LCtx);
      QualType Ty = Var->getType();
      LValue =
          makeZeroElementRegion(State, LValue, Ty, CallOpts.IsArrayCtorOrDtor);
      State =
          addObjectUnderConstruction(State, DSCC->getDeclStmt(), LCtx, LValue);
      return std::make_pair(State, LValue);
    }
    // Construction of a (possibly indirect) field in a constructor's
    // member initializer: the target is the field's region within *this.
    case ConstructionContext::CXX17ElidedCopyConstructorInitializerKind:
    case ConstructionContext::SimpleConstructorInitializerKind: {
      const auto *ICC = cast<ConstructorInitializerConstructionContext>(CC);
      const auto *Init = ICC->getCXXCtorInitializer();
      assert(Init->isAnyMemberInitializer());
      const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
      Loc ThisPtr =
          SVB.getCXXThis(CurCtor, LCtx->getStackFrame());
      SVal ThisVal = State->getSVal(ThisPtr);

      const ValueDecl *Field;
      SVal FieldVal;
      if (Init->isIndirectMemberInitializer()) {
        Field = Init->getIndirectMember();
        FieldVal = State->getLValue(Init->getIndirectMember(), ThisVal);
      } else {
        Field = Init->getMember();
        FieldVal = State->getLValue(Init->getMember(), ThisVal);
      }

      QualType Ty = Field->getType();
      FieldVal = makeZeroElementRegion(State, FieldVal, Ty,
                                       CallOpts.IsArrayCtorOrDtor);
      State = addObjectUnderConstruction(State, Init, LCtx, FieldVal);
      return std::make_pair(State, FieldVal);
    }
    // Construction into memory returned by operator new: the target was
    // recorded when the allocator call was modeled.
    case ConstructionContext::NewAllocatedObjectKind: {
      if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
        const auto *NECC = cast<NewAllocatedObjectConstructionContext>(CC);
        const auto *NE = NECC->getCXXNewExpr();
        SVal V = *getObjectUnderConstruction(State, NE, LCtx);
        if (const SubRegion *MR =
                dyn_cast_or_null<SubRegion>(V.getAsRegion())) {
          if (NE->isArray()) {
            // TODO: In fact, we need to call the constructor for every
            // allocated element, not just the first one!
            CallOpts.IsArrayCtorOrDtor = true;
            return std::make_pair(
                State, loc::MemRegionVal(getStoreManager().GetElementZeroRegion(
                           MR, NE->getType()->getPointeeType())));
          }
          return std::make_pair(State, V);
        }
        // TODO: Detect when the allocator returns a null pointer.
        // Constructor shall not be called in this case.
      }
      break;
    }
    case ConstructionContext::SimpleReturnedValueKind:
    case ConstructionContext::CXX17ElidedCopyReturnedValueKind: {
      // The temporary is to be managed by the parent stack frame.
      // So build it in the parent stack frame if we're not in the
      // top frame of the analysis.
      const StackFrameContext *SFC = LCtx->getStackFrame();
      if (const LocationContext *CallerLCtx = SFC->getParent()) {
        auto RTC = (*SFC->getCallSiteBlock())[SFC->getIndex()]
                       .getAs<CFGCXXRecordTypedCall>();
        if (!RTC) {
          // We were unable to find the correct construction context for the
          // call in the parent stack frame. This is equivalent to not being
          // able to find construction context at all.
          break;
        }
        if (isa<BlockInvocationContext>(CallerLCtx)) {
          // Unwrap block invocation contexts. They're mostly part of
          // the current stack frame.
          CallerLCtx = CallerLCtx->getParent();
          assert(!isa<BlockInvocationContext>(CallerLCtx));
        }
        // Recurse into the caller's construction context to find where the
        // returned object ultimately lives.
        return handleConstructionContext(
            cast<Expr>(SFC->getCallSite()), State, CallerLCtx,
            RTC->getConstructionContext(), CallOpts);
      } else {
        // We are on the top frame of the analysis. We do not know where the
        // object is returned to. Conjure a symbolic region for the return
        // value.
        // TODO: We probably need a new MemRegion kind to represent the storage
        // of that SymbolicRegion, so that we could produce a fancy symbol
        // instead of an anonymous conjured symbol.
        // TODO: Do we need to track the region to avoid having it dead
        // too early? It does die too early, at least in C++17, but because
        // putting anything into a SymbolicRegion causes an immediate escape,
        // it doesn't cause any leak false positives.
        const auto *RCC = cast<ReturnedValueConstructionContext>(CC);
        // Make sure that this doesn't coincide with any other symbol
        // conjured for the returned expression.
        static const int TopLevelSymRegionTag = 0;
        const Expr *RetE = RCC->getReturnStmt()->getRetValue();
        assert(RetE && "Void returns should not have a construction context");
        QualType ReturnTy = RetE->getType();
        QualType RegionTy = ACtx.getPointerType(ReturnTy);
        SVal V = SVB.conjureSymbolVal(&TopLevelSymRegionTag, RetE, SFC,
                                      RegionTy, currBldrCtx->blockCount());
        return std::make_pair(State, V);
      }
      llvm_unreachable("Unhandled return value construction context!");
    }
    case ConstructionContext::ElidedTemporaryObjectKind: {
      assert(AMgr.getAnalyzerOptions().ShouldElideConstructors);
      const auto *TCC = cast<ElidedTemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      const CXXConstructExpr *CE = TCC->getConstructorAfterElision();

      // Support pre-C++17 copy elision. We'll have the elidable copy
      // constructor in the AST and in the CFG, but we'll skip it
      // and construct directly into the final object. This call
      // also sets the CallOpts flags for us.
      SVal V;
      // If the elided copy/move constructor is not supported, there's still
      // benefit in trying to model the non-elided constructor.
      // Stash our state before trying to elide, as it'll get overwritten.
      ProgramStateRef PreElideState = State;
      EvalCallOptions PreElideCallOpts = CallOpts;

      std::tie(State, V) = handleConstructionContext(
          CE, State, LCtx, TCC->getConstructionContextAfterElision(), CallOpts);

      // FIXME: This definition of "copy elision has not failed" is unreliable.
      // It doesn't indicate that the constructor will actually be inlined
      // later; it is still up to evalCall() to decide.
      if (!CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion) {
        // Remember that we've elided the constructor.
        State = addObjectUnderConstruction(State, CE, LCtx, V);

        // Remember that we've elided the destructor.
        if (BTE)
          State = elideDestructor(State, BTE, LCtx);

        // Instead of materialization, shamelessly return
        // the final object destination.
        if (MTE)
          State = addObjectUnderConstruction(State, MTE, LCtx, V);

        return std::make_pair(State, V);
      } else {
        // Copy elision failed. Revert the changes and proceed as if we have
        // a simple temporary.
        State = PreElideState;
        CallOpts = PreElideCallOpts;
      }
      // Deliberate fallthrough: model the non-elided construction as a
      // plain temporary.
      LLVM_FALLTHROUGH;
    }
    case ConstructionContext::SimpleTemporaryObjectKind: {
      const auto *TCC = cast<TemporaryObjectConstructionContext>(CC);
      const CXXBindTemporaryExpr *BTE = TCC->getCXXBindTemporaryExpr();
      const MaterializeTemporaryExpr *MTE = TCC->getMaterializedTemporaryExpr();
      SVal V = UnknownVal();

      if (MTE) {
        if (const ValueDecl *VD = MTE->getExtendingDecl()) {
          assert(MTE->getStorageDuration() != SD_FullExpression);
          if (!VD->getType()->isReferenceType()) {
            // We're lifetime-extended by a surrounding aggregate.
            // Automatic destructors aren't quite working in this case
            // on the CFG side. We should warn the caller about that.
            // FIXME: Is there a better way to retrieve this information from
            // the MaterializeTemporaryExpr?
            CallOpts.IsTemporaryLifetimeExtendedViaAggregate = true;
          }
        }

        // Static- or thread-duration temporaries get a dedicated static
        // temporary region instead of a stack-local one.
        if (MTE->getStorageDuration() == SD_Static ||
            MTE->getStorageDuration() == SD_Thread)
          V = loc::MemRegionVal(MRMgr.getCXXStaticTempObjectRegion(E));
      }

      if (V.isUnknown())
        V = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      if (MTE)
        State = addObjectUnderConstruction(State, MTE, LCtx, V);

      CallOpts.IsTemporaryCtorOrDtor = true;
      return std::make_pair(State, V);
    }
    case ConstructionContext::ArgumentKind: {
      // Arguments are technically temporaries.
      CallOpts.IsTemporaryCtorOrDtor = true;

      const auto *ACC = cast<ArgumentConstructionContext>(CC);
      const Expr *E = ACC->getCallLikeExpr();
      unsigned Idx = ACC->getIndex();
      const CXXBindTemporaryExpr *BTE = ACC->getCXXBindTemporaryExpr();

      CallEventManager &CEMgr = getStateManager().getCallEventManager();
      SVal V = UnknownVal();
      // Try to predict the region of the corresponding parameter in the
      // callee's future stack frame; returns None when that's not possible.
      auto getArgLoc = [&](CallEventRef<> Caller) -> Optional<SVal> {
        const LocationContext *FutureSFC =
            Caller->getCalleeStackFrame(currBldrCtx->blockCount());
        // Return early if we are unable to reliably foresee
        // the future stack frame.
        if (!FutureSFC)
          return None;

        // This should be equivalent to Caller->getDecl() for now, but
        // FutureSFC->getDecl() is likely to support better stuff (like
        // virtual functions) earlier.
        const Decl *CalleeD = FutureSFC->getDecl();

        // FIXME: Support for variadic arguments is not implemented here yet.
        if (CallEvent::isVariadic(CalleeD))
          return None;

        // Operator arguments do not correspond to operator parameters
        // because this-argument is implemented as a normal argument in
        // operator call expressions but not in operator declarations.
        const VarRegion *VR = Caller->getParameterLocation(
            *Caller->getAdjustedParameterIndex(Idx), currBldrCtx->blockCount());
        if (!VR)
          return None;

        return loc::MemRegionVal(VR);
      };

      // Build the appropriate call event for the surrounding call-like
      // expression and record the argument object under construction.
      if (const auto *CE = dyn_cast<CallExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getSimpleCall(CE, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CE, Idx}, LCtx, V);
      } else if (const auto *CCE = dyn_cast<CXXConstructExpr>(E)) {
        // Don't bother figuring out the target region for the future
        // constructor because we won't need it.
        CallEventRef<> Caller =
            CEMgr.getCXXConstructorCall(CCE, /*Target=*/nullptr, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {CCE, Idx}, LCtx, V);
      } else if (const auto *ME = dyn_cast<ObjCMessageExpr>(E)) {
        CallEventRef<> Caller = CEMgr.getObjCMethodCall(ME, State, LCtx);
        if (auto OptV = getArgLoc(Caller))
          V = *OptV;
        else
          break;
        State = addObjectUnderConstruction(State, {ME, Idx}, LCtx, V);
      }

      assert(!V.isUnknown());

      if (BTE)
        State = addObjectUnderConstruction(State, BTE, LCtx, V);

      return std::make_pair(State, V);
    }
    }
  }
  // If we couldn't find an existing region to construct into, assume we're
  // constructing a temporary. Notify the caller of our failure.
  CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
  return std::make_pair(
      State, loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx)));
}

/// Evaluate a CXXConstructExpr or CXXInheritedCtorInitExpr \p E: determine
/// the target region via the construction context, run pre-statement and
/// pre-call checkers, model the constructor call (trivial copy, inlining,
/// or conservative evaluation), then run post-call and post-statement
/// checkers, placing the resulting nodes into \p destNodes.
void ExprEngine::handleConstructor(const Expr *E,
                                   ExplodedNode *Pred,
                                   ExplodedNodeSet &destNodes) {
  const auto *CE = dyn_cast<CXXConstructExpr>(E);
  const auto *CIE = dyn_cast<CXXInheritedCtorInitExpr>(E);
  assert(CE || CIE);

  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  SVal Target = UnknownVal();

  if (CE) {
    if (Optional<SVal> ElidedTarget =
            getObjectUnderConstruction(State, CE, LCtx)) {
      // We've previously modeled an elidable constructor by pretending that it
      // in fact constructs into the correct target. This constructor can
      // therefore be skipped.
      Target = *ElidedTarget;
      StmtNodeBuilder Bldr(Pred, destNodes, *currBldrCtx);
      State = finishObjectConstruction(State, CE, LCtx);
      if (auto L = Target.getAs<Loc>())
        State = State->BindExpr(CE, LCtx, State->getSVal(*L, CE->getType()));
      Bldr.generateNode(CE, Pred, State);
      return;
    }
  }

  // FIXME: Handle arrays, which run the same constructor for every element.
  // For now, we just run the first constructor (which should still invalidate
  // the entire array).

  EvalCallOptions CallOpts;
  auto C = getCurrentCFGElement().getAs<CFGConstructor>();
  assert(C || getCurrentCFGElement().getAs<CFGStmt>());
  const ConstructionContext *CC = C ? C->getConstructionContext() : nullptr;

  const CXXConstructExpr::ConstructionKind CK =
      CE ? CE->getConstructionKind() : CIE->getConstructionKind();
  switch (CK) {
  case CXXConstructExpr::CK_Complete: {
    // Inherited constructors are always base class constructors.
    assert(CE && !CIE && "A complete constructor is inherited?!");

    // The target region is found from construction context.
    std::tie(State, Target) =
        handleConstructionContext(CE, State, LCtx, CC, CallOpts);
    break;
  }
  case CXXConstructExpr::CK_VirtualBase: {
    // Make sure we are not calling virtual base class initializers twice.
    // Only the most-derived object should initialize virtual base classes.
    const auto *OuterCtor = dyn_cast_or_null<CXXConstructExpr>(
        LCtx->getStackFrame()->getCallSite());
    assert(
        (!OuterCtor ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Complete ||
         OuterCtor->getConstructionKind() == CXXConstructExpr::CK_Delegating) &&
        ("This virtual base should have already been initialized by "
         "the most derived class!"));
    (void)OuterCtor;
    LLVM_FALLTHROUGH;
  }
  case CXXConstructExpr::CK_NonVirtualBase:
    // In C++17, classes with non-virtual bases may be aggregates, so they would
    // be initialized as aggregates without a constructor call, so we may have
    // a base class constructed directly into an initializer list without
    // having the derived-class constructor call on the previous stack frame.
    // Initializer lists may be nested into more initializer lists that
    // correspond to surrounding aggregate initializations.
    // FIXME: For now this code essentially bails out. We need to find the
    // correct target region and set it.
    // FIXME: Instead of relying on the ParentMap, we should have the
    // trigger-statement (InitListExpr in this case) passed down from CFG or
    // otherwise always available during construction.
    if (dyn_cast_or_null<InitListExpr>(LCtx->getParentMap().getParent(E))) {
      MemRegionManager &MRMgr = getSValBuilder().getRegionManager();
      Target = loc::MemRegionVal(MRMgr.getCXXTempObjectRegion(E, LCtx));
      CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true;
      break;
    }
    LLVM_FALLTHROUGH;
  case CXXConstructExpr::CK_Delegating: {
    // Base and delegating constructors run on the same object as the
    // enclosing constructor, so start from the current 'this' value.
    const CXXMethodDecl *CurCtor = cast<CXXMethodDecl>(LCtx->getDecl());
    Loc ThisPtr = getSValBuilder().getCXXThis(CurCtor,
                                              LCtx->getStackFrame());
    SVal ThisVal = State->getSVal(ThisPtr);

    if (CK == CXXConstructExpr::CK_Delegating) {
      Target = ThisVal;
    } else {
      // Cast to the base type.
      bool IsVirtual = (CK == CXXConstructExpr::CK_VirtualBase);
      SVal BaseVal =
          getStoreManager().evalDerivedToBase(ThisVal, E->getType(), IsVirtual);
      Target = BaseVal;
    }
    break;
  }
  }

  // If finding the construction context changed the state, commit that
  // change as a separate, tagged node before evaluating the call.
  if (State != Pred->getState()) {
    static SimpleProgramPointTag T("ExprEngine",
                                   "Prepare for object construction");
    ExplodedNodeSet DstPrepare;
    StmtNodeBuilder BldrPrepare(Pred, DstPrepare, *currBldrCtx);
    BldrPrepare.generateNode(E, Pred, State, &T, ProgramPoint::PreStmtKind);
    assert(DstPrepare.size() <= 1);
    if (DstPrepare.size() == 0)
      return;
    Pred = *BldrPrepare.begin();
  }

  const MemRegion *TargetRegion = Target.getAsRegion();
  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<> Call =
      CIE ? (CallEventRef<>)CEMgr.getCXXInheritedConstructorCall(
                CIE, TargetRegion, State, LCtx)
          : (CallEventRef<>)CEMgr.getCXXConstructorCall(
                CE, TargetRegion, State, LCtx);

  ExplodedNodeSet DstPreVisit;
  getCheckerManager().runCheckersForPreStmt(DstPreVisit, Pred, E, *this);

  // Zero-initialize the target region up front when the constructor
  // requires it (e.g. value-initialization).
  ExplodedNodeSet PreInitialized;
  if (CE) {
    // FIXME: Is it possible and/or useful to do this before PreStmt?
    StmtNodeBuilder Bldr(DstPreVisit, PreInitialized, *currBldrCtx);
    for (ExplodedNodeSet::iterator I = DstPreVisit.begin(),
                                   E = DstPreVisit.end();
         I != E; ++I) {
      ProgramStateRef State = (*I)->getState();
      if (CE->requiresZeroInitialization()) {
        // FIXME: Once we properly handle constructors in new-expressions, we'll
        // need to invalidate the region before setting a default value, to make
        // sure there aren't any lingering bindings around. This probably needs
        // to happen regardless of whether or not the object is zero-initialized
        // to handle random fields of a placement-initialized object picking up
        // old bindings. We might only want to do it when we need to, though.
        // FIXME: This isn't actually correct for arrays -- we need to zero-
        // initialize the entire array, not just the first element -- but our
        // handling of arrays everywhere else is weak as well, so this shouldn't
        // actually make things worse. Placement new makes this tricky as well,
        // since it's then possible to be initializing one part of a multi-
        // dimensional array.
        State = State->bindDefaultZero(Target, LCtx);
      }

      Bldr.generateNode(CE, *I, State, /*tag=*/nullptr,
                        ProgramPoint::PreStmtKind);
    }
  } else {
    PreInitialized = DstPreVisit;
  }

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, PreInitialized,
                                            *Call, *this);

  ExplodedNodeSet DstEvaluated;
  StmtNodeBuilder Bldr(DstPreCall, DstEvaluated, *currBldrCtx);

  // Trivial copy/move constructors are modeled as a direct value copy;
  // everything else goes through the generic call evaluation.
  if (CE && CE->getConstructor()->isTrivial() &&
      CE->getConstructor()->isCopyOrMoveConstructor() &&
      !CallOpts.IsArrayCtorOrDtor) {
    // FIXME: Handle other kinds of trivial constructors as well.
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      performTrivialCopy(Bldr, *I, *Call);

  } else {
    for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end();
         I != E; ++I)
      defaultEvalCall(Bldr, *I, *Call, CallOpts);
  }

  // If the CFG was constructed without elements for temporary destructors
  // and the just-called constructor created a temporary object then
  // stop exploration if the temporary object has a noreturn constructor.
  // This can lose coverage because the destructor, if it were present
  // in the CFG, would be called at the end of the full expression or
  // later (for life-time extended temporaries) -- but avoids infeasible
  // paths when no-return temporary destructors are used for assertions.
  const AnalysisDeclContext *ADC = LCtx->getAnalysisDeclContext();
  if (!ADC->getCFGBuildOptions().AddTemporaryDtors) {
    if (llvm::isa_and_nonnull<CXXTempObjectRegion>(TargetRegion) &&
        cast<CXXConstructorDecl>(Call->getDecl())
            ->getParent()
            ->isAnyDestructorNoReturn()) {

      // If we've inlined the constructor, then DstEvaluated would be empty.
      // In this case we still want a sink, which could be implemented
      // in processCallExit. But we don't have that implemented at the moment,
      // so if you hit this assertion, see if you can avoid inlining
      // the respective constructor when analyzer-config cfg-temporary-dtors
      // is set to false.
      // Otherwise there's nothing wrong with inlining such constructor.
      assert(!DstEvaluated.empty() &&
             "We should not have inlined this constructor!");

      for (ExplodedNode *N : DstEvaluated) {
        Bldr.generateSink(E, N, N->getState());
      }

      // There is no need to run the PostCall and PostStmt checker
      // callbacks because we just generated sinks on all nodes in the
      // frontier.
      return;
    }
  }

  // Tear down any argument constructions that were set up for this call.
  ExplodedNodeSet DstPostArgumentCleanup;
  for (ExplodedNode *I : DstEvaluated)
    finishArgumentConstruction(DstPostArgumentCleanup, I, *Call);

  // If there were other constructors called for object-type arguments
  // of this constructor, clean them up.
  ExplodedNodeSet DstPostCall;
  getCheckerManager().runCheckersForPostCall(DstPostCall,
                                             DstPostArgumentCleanup,
                                             *Call, *this);
  getCheckerManager().runCheckersForPostStmt(destNodes, DstPostCall, E, *this);
}

/// Visitor entry point for CXXConstructExpr; defers to handleConstructor.
void ExprEngine::VisitCXXConstructExpr(const CXXConstructExpr *CE,
                                       ExplodedNode *Pred,
                                       ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

/// Visitor entry point for CXXInheritedCtorInitExpr; defers to
/// handleConstructor.
void ExprEngine::VisitCXXInheritedCtorInitExpr(
    const CXXInheritedCtorInitExpr *CE, ExplodedNode *Pred,
    ExplodedNodeSet &Dst) {
  handleConstructor(CE, Pred, Dst);
}

/// Model a destructor call for an object of type \p ObjectType living in
/// region \p Dest, triggered by statement \p S. \p IsBaseDtor marks base
/// subobject destruction; \p CallOpts collects modeling caveats.
void ExprEngine::VisitCXXDestructor(QualType ObjectType,
                                    const MemRegion *Dest,
                                    const Stmt *S,
                                    bool IsBaseDtor,
                                    ExplodedNode *Pred,
                                    ExplodedNodeSet &Dst,
                                    EvalCallOptions &CallOpts) {
  assert(S && "A destructor without a trigger!");
  const LocationContext *LCtx = Pred->getLocationContext();
  ProgramStateRef State = Pred->getState();

  const CXXRecordDecl *RecordDecl = ObjectType->getAsCXXRecordDecl();
  assert(RecordDecl && "Only CXXRecordDecls should have destructors");
  const CXXDestructorDecl *DtorDecl = RecordDecl->getDestructor();
  // FIXME: There should always be a Decl, otherwise the destructor call
  // shouldn't have been added to the CFG in the first place.
  if (!DtorDecl) {
    // Skip the invalid destructor. We cannot simply return because
    // it would interrupt the analysis instead.
    static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor");
    // FIXME: PostImplicitCall with a null decl may crash elsewhere anyway.
652 PostImplicitCall PP(/*Decl=*/nullptr, S->getEndLoc(), LCtx, &T); 653 NodeBuilder Bldr(Pred, Dst, *currBldrCtx); 654 Bldr.generateNode(PP, Pred->getState(), Pred); 655 return; 656 } 657 658 if (!Dest) { 659 // We're trying to destroy something that is not a region. This may happen 660 // for a variety of reasons (unknown target region, concrete integer instead 661 // of target region, etc.). The current code makes an attempt to recover. 662 // FIXME: We probably don't really need to recover when we're dealing 663 // with concrete integers specifically. 664 CallOpts.IsCtorOrDtorWithImproperlyModeledTargetRegion = true; 665 if (const Expr *E = dyn_cast_or_null<Expr>(S)) { 666 Dest = MRMgr.getCXXTempObjectRegion(E, Pred->getLocationContext()); 667 } else { 668 static SimpleProgramPointTag T("ExprEngine", "SkipInvalidDestructor"); 669 NodeBuilder Bldr(Pred, Dst, *currBldrCtx); 670 Bldr.generateSink(Pred->getLocation().withTag(&T), 671 Pred->getState(), Pred); 672 return; 673 } 674 } 675 676 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 677 CallEventRef<CXXDestructorCall> Call = 678 CEMgr.getCXXDestructorCall(DtorDecl, S, Dest, IsBaseDtor, State, LCtx); 679 680 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 681 Call->getSourceRange().getBegin(), 682 "Error evaluating destructor"); 683 684 ExplodedNodeSet DstPreCall; 685 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 686 *Call, *this); 687 688 ExplodedNodeSet DstInvalidated; 689 StmtNodeBuilder Bldr(DstPreCall, DstInvalidated, *currBldrCtx); 690 for (ExplodedNodeSet::iterator I = DstPreCall.begin(), E = DstPreCall.end(); 691 I != E; ++I) 692 defaultEvalCall(Bldr, *I, *Call, CallOpts); 693 694 getCheckerManager().runCheckersForPostCall(Dst, DstInvalidated, 695 *Call, *this); 696 } 697 698 void ExprEngine::VisitCXXNewAllocatorCall(const CXXNewExpr *CNE, 699 ExplodedNode *Pred, 700 ExplodedNodeSet &Dst) { 701 ProgramStateRef State = Pred->getState(); 702 const 
LocationContext *LCtx = Pred->getLocationContext(); 703 PrettyStackTraceLoc CrashInfo(getContext().getSourceManager(), 704 CNE->getBeginLoc(), 705 "Error evaluating New Allocator Call"); 706 CallEventManager &CEMgr = getStateManager().getCallEventManager(); 707 CallEventRef<CXXAllocatorCall> Call = 708 CEMgr.getCXXAllocatorCall(CNE, State, LCtx); 709 710 ExplodedNodeSet DstPreCall; 711 getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, 712 *Call, *this); 713 714 ExplodedNodeSet DstPostCall; 715 StmtNodeBuilder CallBldr(DstPreCall, DstPostCall, *currBldrCtx); 716 for (ExplodedNode *I : DstPreCall) { 717 // FIXME: Provide evalCall for checkers? 718 defaultEvalCall(CallBldr, I, *Call); 719 } 720 // If the call is inlined, DstPostCall will be empty and we bail out now. 721 722 // Store return value of operator new() for future use, until the actual 723 // CXXNewExpr gets processed. 724 ExplodedNodeSet DstPostValue; 725 StmtNodeBuilder ValueBldr(DstPostCall, DstPostValue, *currBldrCtx); 726 for (ExplodedNode *I : DstPostCall) { 727 // FIXME: Because CNE serves as the "call site" for the allocator (due to 728 // lack of a better expression in the AST), the conjured return value symbol 729 // is going to be of the same type (C++ object pointer type). Technically 730 // this is not correct because the operator new's prototype always says that 731 // it returns a 'void *'. So we should change the type of the symbol, 732 // and then evaluate the cast over the symbolic pointer from 'void *' to 733 // the object pointer type. But without changing the symbol's type it 734 // is breaking too much to evaluate the no-op symbolic cast over it, so we 735 // skip it for now. 736 ProgramStateRef State = I->getState(); 737 SVal RetVal = State->getSVal(CNE, LCtx); 738 739 // If this allocation function is not declared as non-throwing, failures 740 // /must/ be signalled by exceptions, and thus the return value will never 741 // be NULL. 
-fno-exceptions does not influence this semantics. 742 // FIXME: GCC has a -fcheck-new option, which forces it to consider the case 743 // where new can return NULL. If we end up supporting that option, we can 744 // consider adding a check for it here. 745 // C++11 [basic.stc.dynamic.allocation]p3. 746 if (const FunctionDecl *FD = CNE->getOperatorNew()) { 747 QualType Ty = FD->getType(); 748 if (const auto *ProtoType = Ty->getAs<FunctionProtoType>()) 749 if (!ProtoType->isNothrow()) 750 State = State->assume(RetVal.castAs<DefinedOrUnknownSVal>(), true); 751 } 752 753 ValueBldr.generateNode( 754 CNE, I, addObjectUnderConstruction(State, CNE, LCtx, RetVal)); 755 } 756 757 ExplodedNodeSet DstPostPostCallCallback; 758 getCheckerManager().runCheckersForPostCall(DstPostPostCallCallback, 759 DstPostValue, *Call, *this); 760 for (ExplodedNode *I : DstPostPostCallCallback) { 761 getCheckerManager().runCheckersForNewAllocator(*Call, Dst, I, *this); 762 } 763 } 764 765 void ExprEngine::VisitCXXNewExpr(const CXXNewExpr *CNE, ExplodedNode *Pred, 766 ExplodedNodeSet &Dst) { 767 // FIXME: Much of this should eventually migrate to CXXAllocatorCall. 768 // Also, we need to decide how allocators actually work -- they're not 769 // really part of the CXXNewExpr because they happen BEFORE the 770 // CXXConstructExpr subexpression. See PR12014 for some discussion. 771 772 unsigned blockCount = currBldrCtx->blockCount(); 773 const LocationContext *LCtx = Pred->getLocationContext(); 774 SVal symVal = UnknownVal(); 775 FunctionDecl *FD = CNE->getOperatorNew(); 776 777 bool IsStandardGlobalOpNewFunction = 778 FD->isReplaceableGlobalAllocationFunction(); 779 780 ProgramStateRef State = Pred->getState(); 781 782 // Retrieve the stored operator new() return value. 
  // If the allocator call was modeled, VisitCXXNewAllocatorCall recorded its
  // return value under this CXXNewExpr; consume it here.
  if (AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // NOTE(review): this dereferences the optional unconditionally — it
    // relies on VisitCXXNewAllocatorCall always having recorded a value for
    // CNE in this mode; confirm against the allocator-call path.
    symVal = *getObjectUnderConstruction(State, CNE, LCtx);
    State = finishObjectConstruction(State, CNE, LCtx);
  }

  // We assume all standard global 'operator new' functions allocate memory in
  // heap. We realize this is an approximation that might not correctly model
  // a custom global allocator.
  if (symVal.isUnknown()) {
    if (IsStandardGlobalOpNewFunction)
      symVal = svalBuilder.getConjuredHeapSymbolVal(CNE, LCtx, blockCount);
    else
      // Custom allocator: conjure an opaque symbol of the expression's type
      // rather than committing to a heap region.
      symVal = svalBuilder.conjureSymbolVal(nullptr, CNE, LCtx, CNE->getType(),
                                            blockCount);
  }

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXAllocatorCall> Call =
      CEMgr.getCXXAllocatorCall(CNE, State, LCtx);

  // When the allocator call was not modeled above, conservatively account for
  // its side effects here.
  if (!AMgr.getAnalyzerOptions().MayInlineCXXAllocator) {
    // Invalidate placement args.
    // FIXME: Once we figure out how we want allocators to work,
    // we should be using the usual pre-/(default-)eval-/post-call checkers
    // here.
    State = Call->invalidateRegions(blockCount);
    if (!State)
      return;

    // If this allocation function is not declared as non-throwing, failures
    // /must/ be signalled by exceptions, and thus the return value will never
    // be NULL. -fno-exceptions does not influence this semantics.
    // FIXME: GCC has a -fcheck-new option, which forces it to consider the case
    // where new can return NULL. If we end up supporting that option, we can
    // consider adding a check for it here.
    // C++11 [basic.stc.dynamic.allocation]p3.
    if (FD) {
      QualType Ty = FD->getType();
      if (const auto *ProtoType = Ty->getAs<FunctionProtoType>())
        if (!ProtoType->isNothrow())
          // Only a defined-or-unknown value can be assumed upon; skip the
          // constraint otherwise.
          if (auto dSymVal = symVal.getAs<DefinedOrUnknownSVal>())
            State = State->assume(*dSymVal, true);
    }
  }

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  SVal Result = symVal;

  if (CNE->isArray()) {
    // FIXME: allocating an array requires simulating the constructors.
    // For now, just return a symbolicated region.
    if (const auto *NewReg = cast_or_null<SubRegion>(symVal.getAsRegion())) {
      // Use the zeroth element region so the result carries the element
      // (pointee) type rather than the raw allocation.
      QualType ObjTy = CNE->getType()->getPointeeType();
      const ElementRegion *EleReg =
          getStoreManager().GetElementZeroRegion(NewReg, ObjTy);
      Result = loc::MemRegionVal(EleReg);
    }
    State = State->BindExpr(CNE, Pred->getLocationContext(), Result);
    Bldr.generateNode(CNE, Pred, State);
    return;
  }

  // FIXME: Once we have proper support for CXXConstructExprs inside
  // CXXNewExpr, we need to make sure that the constructed object is not
  // immediately invalidated here. (The placement call should happen before
  // the constructor call anyway.)
  if (FD && FD->isReservedGlobalPlacementOperator()) {
    // Non-array placement new should always return the placement location.
    SVal PlacementLoc = State->getSVal(CNE->getPlacementArg(0), LCtx);
    Result = svalBuilder.evalCast(PlacementLoc, CNE->getType(),
                                  CNE->getPlacementArg(0)->getType());
  }

  // Bind the address of the object, then check to see if we cached out.
  State = State->BindExpr(CNE, LCtx, Result);
  ExplodedNode *NewN = Bldr.generateNode(CNE, Pred, State);
  if (!NewN)
    return;

  // If the type is not a record, we won't have a CXXConstructExpr as an
  // initializer. Copy the value over.
  if (const Expr *Init = CNE->getInitializer()) {
    if (!isa<CXXConstructExpr>(Init)) {
      assert(Bldr.getResults().size() == 1);
      Bldr.takeNodes(NewN);
      // Non-record initializer (e.g. `new int(5)`): there is no constructor
      // to run, so store the initializer's value into the new location here.
      evalBind(Dst, CNE, NewN, Result, State->getSVal(Init, LCtx),
               /*FirstInit=*/IsStandardGlobalOpNewFunction);
    }
  }
}

/// Evaluate a delete-expression by running the pre-/post-call checkers over
/// the corresponding deallocator call event; the engine does not otherwise
/// model the deallocation here.
void ExprEngine::VisitCXXDeleteExpr(const CXXDeleteExpr *CDE,
                                    ExplodedNode *Pred, ExplodedNodeSet &Dst) {

  CallEventManager &CEMgr = getStateManager().getCallEventManager();
  CallEventRef<CXXDeallocatorCall> Call = CEMgr.getCXXDeallocatorCall(
      CDE, Pred->getState(), Pred->getLocationContext());

  ExplodedNodeSet DstPreCall;
  getCheckerManager().runCheckersForPreCall(DstPreCall, Pred, *Call, *this);

  getCheckerManager().runCheckersForPostCall(Dst, DstPreCall, *Call, *this);
}

/// Model a catch statement: bind a fresh conjured symbol to the exception
/// variable, if one is declared.
void ExprEngine::VisitCXXCatchStmt(const CXXCatchStmt *CS, ExplodedNode *Pred,
                                   ExplodedNodeSet &Dst) {
  const VarDecl *VD = CS->getExceptionDecl();
  if (!VD) {
    // catch (...) — nothing to bind; propagate the predecessor unchanged.
    Dst.Add(Pred);
    return;
  }

  // The caught value is unknown to the analyzer; represent it with a fresh
  // symbol of the declared type.
  const LocationContext *LCtx = Pred->getLocationContext();
  SVal V = svalBuilder.conjureSymbolVal(CS, LCtx, VD->getType(),
                                        currBldrCtx->blockCount());
  ProgramStateRef state = Pred->getState();
  state = state->bindLoc(state->getLValue(VD, LCtx), V, LCtx);

  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);
  Bldr.generateNode(CS, Pred, state);
}

/// Evaluate a 'this' expression by reading the value stored in the
/// CXXThisRegion for the current location context and binding it to the
/// expression.
void ExprEngine::VisitCXXThisExpr(const CXXThisExpr *TE, ExplodedNode *Pred,
                                  ExplodedNodeSet &Dst) {
  StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx);

  // Get the this object region from StoreManager.
  const LocationContext *LCtx = Pred->getLocationContext();
  const MemRegion *R =
    svalBuilder.getRegionManager().getCXXThisRegion(
                                  getContext().getCanonicalType(TE->getType()),
                                  LCtx);

  // Load the stored 'this' pointer and bind it as the expression's value.
  ProgramStateRef state = Pred->getState();
  SVal V = state->getSVal(loc::MemRegionVal(R));
  Bldr.generateNode(TE, Pred, state->BindExpr(TE, LCtx, V));
}

/// Evaluate a lambda expression: materialize a temporary-object region for
/// the closure, bind each capture into the corresponding field of the
/// lambda's class, and bind the resulting rvalue to the expression.
void ExprEngine::VisitLambdaExpr(const LambdaExpr *LE, ExplodedNode *Pred,
                                 ExplodedNodeSet &Dst) {
  const LocationContext *LocCtxt = Pred->getLocationContext();

  // Get the region of the lambda itself.
  const MemRegion *R = svalBuilder.getRegionManager().getCXXTempObjectRegion(
      LE, LocCtxt);
  SVal V = loc::MemRegionVal(R);

  ProgramStateRef State = Pred->getState();

  // If we created a new MemRegion for the lambda, we should explicitly bind
  // the captures.
  // Walk the closure class's fields in lockstep with the capture
  // initializers; Clang guarantees they correspond one-to-one.
  CXXRecordDecl::field_iterator CurField = LE->getLambdaClass()->field_begin();
  for (LambdaExpr::const_capture_init_iterator i = LE->capture_init_begin(),
                                               e = LE->capture_init_end();
       i != e; ++i, ++CurField) {
    FieldDecl *FieldForCapture = *CurField;
    SVal FieldLoc = State->getLValue(FieldForCapture, V);

    SVal InitVal;
    if (!FieldForCapture->hasCapturedVLAType()) {
      Expr *InitExpr = *i;
      assert(InitExpr && "Capture missing initialization expression");
      InitVal = State->getSVal(InitExpr, LocCtxt);
    } else {
      // The field stores the length of a captured variable-length array.
      // These captures don't have initialization expressions; instead we
      // get the length from the VLAType size expression.
      Expr *SizeExpr = FieldForCapture->getCapturedVLAType()->getSizeExpr();
      InitVal = State->getSVal(SizeExpr, LocCtxt);
    }

    State = State->bindLoc(FieldLoc, InitVal, LocCtxt);
  }

  // Decay the Loc into an RValue, because there might be a
  // MaterializeTemporaryExpr node above this one which expects the bound value
  // to be an RValue.
  SVal LambdaRVal = State->getSVal(R);

  ExplodedNodeSet Tmp;
  StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx);
  // FIXME: is this the right program point kind?
  Bldr.generateNode(LE, Pred,
                    State->BindExpr(LE, LocCtxt, LambdaRVal),
                    nullptr, ProgramPoint::PostLValueKind);

  // FIXME: Move all post/pre visits to ::Visit().
  // Give checkers their post-statement callback on the lambda expression.
  getCheckerManager().runCheckersForPostStmt(Dst, Tmp, LE, *this);
}