//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Index/TranslationUnit.h"
#include "clang/AST/Expr.h"
#include "clang/AST/StmtCXX.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include <deque>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
class DFS : public WorkList {
  SmallVector<WorkListUnit, 20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit &U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert(!Stack.empty());
    // Copy the unit by value; pop_back() destroys the element a reference
    // would point into.
    WorkListUnit U = Stack.back();
    Stack.pop_back();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit &U) {
    // FIFO: enqueue at the back, dequeue from the front.
    Queue.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

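// BFSBlockDFSContents explores the CFG breadth-first across basic blocks
// while exploring the contents of each block depth-first: BlockEntrance
// units go into a FIFO queue, all other units onto a LIFO stack, and
// dequeue() drains the stack before touching the queue.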
namespace {
class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  SmallVector<WorkListUnit, 20> Stack;
public:
  virtual bool hasWork() const {
    return !Queue.empty() || !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit &U) {
    if (isa<BlockEntrance>(U.getNode()->getLocation()))
      Queue.push_back(U);
    else
      Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    // Process the contents of the current basic block to completion first.
    if (!Stack.empty()) {
      // Copy the unit by value; pop_back() destroys the element a reference
      // would point into.
      WorkListUnit U = Stack.back();
      Stack.pop_back();
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference. The subsequent pop_front() would make it
    // unsafe.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};
} // end anonymous namespace

WorkList *WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of
/// steps. A Steps value of 0 means the number of steps is unlimited. Returns
/// true if the worklist still has work remaining when the engine stops.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 const ProgramState *InitState) {

  // Initialize the analysis by constructing the root if none exists.
  if (G->num_roots() == 0) {
    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");
    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to the empty counter.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0)
        break;
      --Steps;
    }

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    // Dispatch on the location type.
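    // Each ProgramPoint kind is routed to a dedicated handler below. Note
    // that BlockExit points never reach the worklist: HandleBlockExit runs
    // eagerly once a block's last element has been processed and enqueues
    // the resulting BlockEdge nodes directly.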
    switch (Node->getLocation().getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind:
      HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
                      WU.getIndex(), Node);
      break;

    case ProgramPoint::CallExitKind:
      HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
      break;

    default:
      assert(isa<PostStmt>(Node->getLocation()) ||
             isa<PostInitializer>(Node->getLocation()));
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
      break;
    }
  }

  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 const ProgramState *InitState,
                                                 ExplodedNodeSet &Dst) {
  ExecuteWorkList(L, Steps, InitState);
  for (SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
                                                E = G->EndNodes.end();
       I != E; ++I) {
    Dst.Add(*I);
  }
}

void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
                                 unsigned Index, ExplodedNode *Pred) {
  CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
                               L.getCalleeContext(), Block, Index);
  SubEng.processCallEnter(Builder);
}

void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
  CallExitNodeBuilder Builder(*this, Pred);
  SubEng.processCallExit(Builder);
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {

  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert(L.getLocationContext()->getCFG()->getExit().size() == 0 &&
           "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(nodeBuilder);

  // Auto-generate a node if the subengine did not generate one.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);

  // Record the sink nodes as exhausted blocks.
  const SmallVectorImpl<ExplodedNode*> &Sinks = nodeBuilder.getSinks();
  for (SmallVectorImpl<ExplodedNode*>::const_iterator
         I = Sinks.begin(), E = Sinks.end(); I != E; ++I) {
    blocksExhausted.push_back(std::make_pair(L, *I));
  }
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {

  // Increment the block counter.
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter,
                             Pred->getLocationContext()->getCurrentStackFrame(),
                             L.getBlock()->getBlockID());
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (CFGElement E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

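// HandleBlockExit dispatches on the block's terminator: branching
// terminators are forwarded to HandleBranch or to the specialized indirect
// goto and switch builders, while blocks without a terminator simply flow
// into their unique successor.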
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
        builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                *(B->succ_begin()), this);

      SubEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass: {
      // An ObjCForCollectionStmt appears twice in a CFG:
      //
      //  (1) inside a basic block, where it represents the binding of the
      //      'element' variable to a value, and
      //  (2) as a terminator, where it represents the branch.
      //
      // For (1), subengines will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test the element for nil because a container can contain nil
      // elements.
      HandleBranch(Term, Term, B, Pred);
      return;
    }

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      SubEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              const ProgramState *State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G->getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

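// enqueueStmtNode re-enqueues a node produced for a CFG element so that the
// next element in the block gets processed. CallEnter nodes keep the current
// index (the CallExpr's index is needed to create the callee's
// StackFrameContext), PostInitializer nodes simply advance, and for all
// other locations a PostStmt successor is generated first, unless the node
// already sits at that program point.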
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  const CFGStmt *CS = (*Block)[Idx].getAs<CFGStmt>();
  const Stmt *St = CS ? CS->getStmt() : 0;
  PostStmt Loc(St, N->getLocationContext());

  if (Loc == N->getLocation()) {
    // 'N' is already at the desired program point; re-enqueue it to process
    // the next CFG element.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G->getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, *G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitNode(ExplodedNode *N) {
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(N->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee's location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = G->getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, *G);
  return isNew ? Node : 0;
}


void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate a CallExit node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitNode(N);
      if (N)
        WList->enqueue(N);
    } else
      G->addEndOfPath(N);
  }
}

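// generateNodeImpl adds a successor of 'FromN' to the exploded graph. The
// predecessor is removed from the builder's frontier; unless the new node
// is marked as a sink or has been visited before, it takes the
// predecessor's place in the frontier.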
ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            const ProgramState *State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G->getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, *C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return 0;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I)
      EnclosingBldr->addNodes(*I);
}

ExplodedNode *BranchNodeBuilder::generateNode(const ProgramState *State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible, we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      const ProgramState *St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                                Pred->getLocationContext()),
                                      St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        const ProgramState *St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                                Pred->getLocationContext()),
                                      St, false, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (!IsNew)
    return 0;

  Eng.WList->enqueue(Succ);
  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const ProgramState *St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return NULL;

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                                Pred->getLocationContext()),
                                      St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

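// For a call into a different translation unit, a fresh ExprEngine is built
// over the callee's ASTContext: the caller's state is marshalled into the
// new engine's state manager and the callee is analyzed to completion
// there, because this engine must not reference data structures owned by
// another translation unit.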
void CallEnterNodeBuilder::generateNode(const ProgramState *state) {
  // Check if the callee is in the same translation unit.
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine does not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit. The Diagnostic is actually shared when we create
    // ASTUnits from AST files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(), OldMgr);

    // Create the new engine.
    // FIXME: This cast isn't really safe.
    bool GCEnabled = static_cast<ExprEngine&>(Eng.SubEng).isObjCGCEnabled();
    ExprEngine NewEng(AMgr, GCEnabled);

    // Create the new LocationContext.
    AnalysisDeclContext *NewAnaCtx =
      AMgr.getAnalysisDeclContext(CalleeCtx->getDecl(),
                                  CalleeCtx->getTranslationUnit());

    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx =
      NewAnaCtx->getStackFrame(OldLocCtx->getParent(),
                               OldLocCtx->getCallSite(),
                               OldLocCtx->getCallSiteBlock(),
                               OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const ProgramState *NewState =
      NewEng.getStateManager().MarshalState(state, NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee's entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, false, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

void CallExitNodeBuilder::generateNode(const ProgramState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic object destructor call, the call site
  // is the Stmt that triggers the destructor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, false, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}