//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Index/TranslationUnit.h"
#include "clang/AST/Expr.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include <vector>
#include <deque>

using llvm::cast;
using llvm::isa;
using namespace clang;
using namespace ento;

// This should be removed in the future.
namespace clang {
namespace ento {
TransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
                                const LangOptions& lopts);
}
}

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
class DFS : public WorkList {
  llvm::SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert(!Stack.empty());
    // Copy the unit before popping; a reference would be invalidated by
    // pop_back().
    WorkListUnit U = Stack.back();
    Stack.pop_back();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (llvm::SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_front(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace
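
// Illustrative sketch, not part of the engine: visitItemsInWorkList() lets a
// client scan the pending units without dequeuing them, and returns true as
// soon as any visit() call does. Assuming the Visitor interface declares a
// single visit(const WorkListUnit&) callback, a hypothetical search for a
// particular node could look roughly like:
//
//   struct FindNode : public WorkList::Visitor {
//     const ExplodedNode *Target;
//     explicit FindNode(const ExplodedNode *N) : Target(N) {}
//     virtual bool visit(const WorkListUnit &U) {
//       return U.getNode() == Target;  // Stop the scan on the first match.
//     }
//   };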

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  llvm::SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Queue.empty() || !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    if (isa<BlockEntrance>(U.getNode()->getLocation()))
      Queue.push_front(U);
    else
      Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    // Process all basic blocks to completion.
    if (!Stack.empty()) {
      // Copy the unit before popping; a reference would be invalidated by
      // pop_back().
      WorkListUnit U = Stack.back();
      Stack.pop_back();
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference. The subsequent pop_front() would make it
    // unsafe.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (llvm::SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 const GRState *InitState) {

  if (G->num_roots() == 0) { // Initialize the analysis by constructing
                             // the root if none exists.

    const CFGBlock* Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Get the solitary successor.
    const CFGBlock* Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0)
        break;
      --Steps;
    }

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode* Node = WU.getNode();

    // Dispatch on the location type.
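    // BlockEdge and BlockEntrance points drive ordinary CFG traversal;
    // CallEnter and CallExit mark transitions into and out of inlined calls;
    // everything else (PostStmt or PostInitializer) is a statement-level
    // point handled by HandlePostStmt.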
    switch (Node->getLocation().getKind()) {
      case ProgramPoint::BlockEdgeKind:
        HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockEntranceKind:
        HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockExitKind:
        assert (false && "BlockExit locations never occur in forward analysis.");
        break;

      case ProgramPoint::CallEnterKind:
        HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
                        WU.getIndex(), Node);
        break;

      case ProgramPoint::CallExitKind:
        HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
        break;

      default:
        assert(isa<PostStmt>(Node->getLocation()) ||
               isa<PostInitializer>(Node->getLocation()));
        HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
        break;
    }
  }

  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 const GRState *InitState,
                                                 ExplodedNodeSet &Dst) {
  ExecuteWorkList(L, Steps, InitState);
  for (llvm::SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
                                        E = G->EndNodes.end(); I != E; ++I) {
    Dst.Add(*I);
  }
}

void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
                                 unsigned Index, ExplodedNode *Pred) {
  CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
                               L.getCalleeContext(), Block, Index);
  SubEng.processCallEnter(Builder);
}

void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
  CallExitNodeBuilder Builder(*this, Pred);
  SubEng.processCallExit(Builder);
}

void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {

  const CFGBlock* Blk = L.getDst();

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    EndOfFunctionNodeBuilder Builder(Blk, Pred, this);
    SubEng.processEndOfFunction(Builder);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the subengine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE);
  SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder);

  if (dstNodes.empty()) {
    if (!nodeBuilder.hasGeneratedNode) {
      // Auto-generate a node and enqueue it to the worklist.
      generateNode(BE, Pred->State, Pred);
    }
  }
  else {
    for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end();
         I != E; ++I) {
      WList->enqueue(*I);
    }
  }

  for (llvm::SmallVectorImpl<ExplodedNode*>::const_iterator
       I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end();
       I != E; ++I) {
    blocksExhausted.push_back(std::make_pair(L, *I));
  }
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance& L,
                                     ExplodedNode* Pred) {

  // Increment the block counter.
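  // The counter records how many times each block has been entered within the
  // current stack frame. The subengine can consult it in
  // processCFGBlockEntrance() to stop exploring a block that has been visited
  // too often (e.g. to bound loop unrolling); blocks it gives up on are
  // recorded in blocksExhausted by HandleBlockEdge() above.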
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter,
                            Pred->getLocationContext()->getCurrentStackFrame(),
                            L.getBlock()->getBlockID());
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (CFGElement E = L.getFirstElement()) {
    StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement(E, Builder);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) {

  if (const Stmt* Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        assert(false && "Analysis for this terminator not implemented.");
        break;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
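        //
        // For (2), the ObjCForCollectionStmt is passed below as both the
        // condition and the terminator: the branch dispatches on the value
        // bound in (1) rather than on a separate condition expression.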
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term,
                              const CFGBlock * B, ExplodedNode* Pred) {
  assert(B->succ_size() == 2);
  BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1),
                            Pred, this);
  SubEng.processBranch(Cond, Term, Builder);
}

void CoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx,
                                ExplodedNode* Pred) {
  assert (!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    StmtNodeBuilder Builder(B, StmtIdx, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement((*B)[StmtIdx], Builder);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint& Loc,
                              const GRState* State, ExplodedNode* Pred) {

  bool IsNew;
  ExplodedNode* Node = G->getNode(Loc, State, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

ExplodedNode *
GenericNodeBuilderImpl::generateNodeImpl(const GRState *state,
                                         ExplodedNode *pred,
                                         ProgramPoint programPoint,
                                         bool asSink) {

  hasGeneratedNode = true;
  bool isNew;
  ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew);
  if (pred)
    node->addPredecessor(pred, engine.getGraph());
  if (isNew) {
    if (asSink) {
      node->markAsSink();
      sinksGenerated.push_back(node);
    }
    return node;
  }
  return 0;
}

StmtNodeBuilder::StmtNodeBuilder(const CFGBlock* b, unsigned idx,
                                 ExplodedNode* N, CoreEngine* e,
                                 GRStateManager &mgr)
  : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr),
    PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false),
    PointKind(ProgramPoint::PostStmtKind), Tag(0) {
  Deferred.insert(N);
  CleanedState = Pred->getState();
}

StmtNodeBuilder::~StmtNodeBuilder() {
  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink())
      GenerateAutoTransition(*I);
}

void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) {
  assert (!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    Eng.WList->enqueue(N, &B, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  PostStmt Loc(getStmt(), N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew);
  Succ->addPredecessor(N, *Eng.G);

  if (IsNew)
    Eng.WList->enqueue(Succ, &B, Idx+1);
}

ExplodedNode* StmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
                                        ExplodedNode* Pred, const GRState* St,
                                        ProgramPoint::Kind K) {

  ExplodedNode* N = generateNode(S, St, Pred, K);

  if (N) {
    if (BuildSinks)
      N->markAsSink();
    else
      Dst.Add(N);
  }

  return N;
}

static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K,
                                    const LocationContext *LC,
                                    const void *tag) {
  switch (K) {
    default:
      assert(false && "Unhandled ProgramPoint kind");
    case ProgramPoint::PreStmtKind:
      return PreStmt(S, LC, tag);
    case ProgramPoint::PostStmtKind:
      return PostStmt(S, LC, tag);
    case ProgramPoint::PreLoadKind:
      return PreLoad(S, LC, tag);
    case ProgramPoint::PostLoadKind:
      return PostLoad(S, LC, tag);
    case ProgramPoint::PreStoreKind:
      return PreStore(S, LC, tag);
    case ProgramPoint::PostStoreKind:
      return PostStore(S, LC, tag);
    case ProgramPoint::PostLValueKind:
      return PostLValue(S, LC, tag);
    case ProgramPoint::PostPurgeDeadSymbolsKind:
      return PostPurgeDeadSymbols(S, LC, tag);
  }
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state,
                                      ExplodedNode* Pred,
                                      ProgramPoint::Kind K,
                                      const void *tag) {

  const ProgramPoint &L = GetProgramPoint(S, K, Pred->getLocationContext(),tag);
  return generateNodeInternal(L, state, Pred);
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
                                      const GRState* State,
                                      ExplodedNode* Pred) {
  bool IsNew;
  ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew);
  N->addPredecessor(Pred, *Eng.G);
  Deferred.erase(Pred);

  if (IsNew) {
    Deferred.insert(N);
    return N;
  }

  return NULL;
}

// This function generates a new ExplodedNode but not a new branch (block
// edge).
ExplodedNode* BranchNodeBuilder::generateNode(const Stmt* Condition,
                                              const GRState* State) {
  bool IsNew;

  ExplodedNode* Succ
    = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()),
                     State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  Pred = Succ;

  if (IsNew)
    return Succ;

  return NULL;
}

ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State,
                                              bool branch) {

  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  bool IsNew;

  ExplodedNode* Succ =
    Eng.G->getNode(BlockEdge(Src, branch ? DstT : DstF,
                             Pred->getLocationContext()),
                   State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (branch)
    GeneratedTrue = true;
  else
    GeneratedFalse = true;

  if (IsNew) {
    Deferred.push_back(Succ);
    return Succ;
  }

  return NULL;
}

BranchNodeBuilder::~BranchNodeBuilder() {
  if (!GeneratedTrue) generateNode(Pred->State, true);
  if (!GeneratedFalse) generateNode(Pred->State, false);

  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink()) Eng.WList->enqueue(*I);
}


ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St,
                                      bool isSink) {
  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {

    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St) {

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    Eng.WList->enqueue(Succ);
    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) {

  // Get the block for the default case.
  assert (Src->succ_rbegin() != Src->succ_rend());
  CFGBlock* DefaultBlock = *Src->succ_rbegin();

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}

EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() {
  // Auto-generate an EOP node if one has not been generated.
  if (!hasGeneratedNode) {
    // If we are in an inlined call, generate a CallExit node.
    if (Pred->getLocationContext()->getParent())
      GenerateCallExitNode(Pred->State);
    else
      generateNode(Pred->State);
  }
}

ExplodedNode*
EndOfFunctionNodeBuilder::generateNode(const GRState* State,
                                       ExplodedNode* P, const void *tag) {
  hasGeneratedNode = true;
  bool IsNew;

  ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B,
                               Pred->getLocationContext(), tag ? tag : Tag),
                                      State, &IsNew);

  Node->addPredecessor(P ? P : Pred, *Eng.G);

  if (IsNew) {
    Eng.G->addEndOfPath(Node);
    return Node;
  }

  return NULL;
}

void EndOfFunctionNodeBuilder::GenerateCallExitNode(const GRState *state) {
  hasGeneratedNode = true;
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee's location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(Pred, *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}


void CallEnterNodeBuilder::generateNode(const GRState *state) {
  // Check if the callee is in the same translation unit.
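  // If it is not, the callee's AST lives in a different ASTContext, so it
  // cannot be analyzed with the current engine; a separate AnalysisManager
  // and ExprEngine are constructed below for the callee's translation unit.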
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine does not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit.
    // The Diagnostic is actually shared when we create ASTUnits from AST files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(),
                         OldMgr.getLangOptions(),
                         OldMgr.getPathDiagnosticClient(),
                         OldMgr.getStoreManagerCreator(),
                         OldMgr.getConstraintManagerCreator(),
                         OldMgr.getCheckerManager(),
                         OldMgr.getIndexer(),
                         OldMgr.getMaxNodes(), OldMgr.getMaxVisit(),
                         OldMgr.shouldVisualizeGraphviz(),
                         OldMgr.shouldVisualizeUbigraph(),
                         OldMgr.shouldPurgeDead(),
                         OldMgr.shouldEagerlyAssume(),
                         OldMgr.shouldTrimGraph(),
                         OldMgr.shouldInlineCall(),
                     OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(),
                     OldMgr.getAnalysisContextManager().getAddImplicitDtors(),
                     OldMgr.getAnalysisContextManager().getAddInitializers(),
                         OldMgr.shouldEagerlyTrimExplodedGraph());
    llvm::OwningPtr<TransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(),
                                                       /* GCEnabled */ false,
                                                       AMgr.getLangOptions()));
    // Create the new engine.
    ExprEngine NewEng(AMgr, TF.take());

    // Create the new LocationContext.
    AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
                                               CalleeCtx->getTranslationUnit());
    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
                                               OldLocCtx->getParent(),
                                               OldLocCtx->getCallSite(),
                                               OldLocCtx->getCallSiteBlock(),
                                               OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const GRState *NewState = NewEng.getStateManager().MarshalState(state,
                                                                    NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

void CallExitNodeBuilder::generateNode(const GRState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic object destructor call, the call site
  // is the Stmt that triggers the destructor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}