//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/Index/TranslationUnit.h"
#include "clang/AST/Expr.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include <deque>

using namespace clang;
using namespace ento;

// This should be removed in the future.
namespace clang {
namespace ento {
TransferFuncs* MakeCFRefCountTF(ASTContext& Ctx, bool GCEnabled,
                                const LangOptions& lopts);
}
}

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert (!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_front(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the dtor for WorkList here because it contains virtual member
// functions, and we want the code for the dtor generated in one compilation
// unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Queue.empty() || !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    if (isa<BlockEntrance>(U.getNode()->getLocation()))
      Queue.push_front(U);
    else
      Stack.push_back(U);
  }
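
  // Prefer the DFS stack: the statements of the current basic block are
  // processed to completion before the next BlockEntrance is taken from
  // the BFS queue.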
  virtual WorkListUnit dequeue() {
    // Process all basic blocks to completion.
    if (!Stack.empty()) {
      const WorkListUnit& U = Stack.back();
      Stack.pop_back(); // This technically "invalidates" U, but we are fine.
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference here; the subsequent pop_front() would
    // invalidate it.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 const GRState *InitState) {

  if (G->num_roots() == 0) { // Initialize the analysis by constructing
                             // the root if none exists.

    const CFGBlock* Entry = &(L->getCFG()->getEntry());

    assert (Entry->empty() &&
            "Entry block must be empty.");

    assert (Entry->succ_size() == 1 &&
            "Entry block must have 1 successor.");

    // Get the solitary successor.
    const CFGBlock* Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to being empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0)
        break;
      --Steps;
    }

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode* Node = WU.getNode();

    // Dispatch on the location type.
    switch (Node->getLocation().getKind()) {
      case ProgramPoint::BlockEdgeKind:
        HandleBlockEdge(cast<BlockEdge>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockEntranceKind:
        HandleBlockEntrance(cast<BlockEntrance>(Node->getLocation()), Node);
        break;

      case ProgramPoint::BlockExitKind:
        assert(false && "BlockExit locations never occur in forward analysis.");
        break;

      case ProgramPoint::CallEnterKind:
        HandleCallEnter(cast<CallEnter>(Node->getLocation()), WU.getBlock(),
                        WU.getIndex(), Node);
        break;

      case ProgramPoint::CallExitKind:
        HandleCallExit(cast<CallExit>(Node->getLocation()), Node);
        break;

      default:
        assert(isa<PostStmt>(Node->getLocation()) ||
               isa<PostInitializer>(Node->getLocation()));
        HandlePostStmt(WU.getBlock(), WU.getIndex(), Node);
        break;
    }
  }

  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 const GRState *InitState,
                                                 ExplodedNodeSet &Dst) {
  ExecuteWorkList(L, Steps, InitState);
  for (SmallVectorImpl<ExplodedNode*>::iterator I = G->EndNodes.begin(),
                                                E = G->EndNodes.end();
       I != E; ++I) {
    Dst.Add(*I);
  }
}

void CoreEngine::HandleCallEnter(const CallEnter &L, const CFGBlock *Block,
                                 unsigned Index, ExplodedNode *Pred) {
  CallEnterNodeBuilder Builder(*this, Pred, L.getCallExpr(),
                               L.getCalleeContext(), Block, Index);
  SubEng.processCallEnter(Builder);
}

void CoreEngine::HandleCallExit(const CallExit &L, ExplodedNode *Pred) {
  CallExitNodeBuilder Builder(*this, Pred);
  SubEng.processCallExit(Builder);
}

void CoreEngine::HandleBlockEdge(const BlockEdge& L, ExplodedNode* Pred) {

  const CFGBlock* Blk = L.getDst();

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert (L.getLocationContext()->getCFG()->getExit().size() == 0
            && "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    EndOfFunctionNodeBuilder Builder(Blk, Pred, this);
    SubEng.processEndOfFunction(Builder);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the subengine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  GenericNodeBuilder<BlockEntrance> nodeBuilder(*this, Pred, BE);
  SubEng.processCFGBlockEntrance(dstNodes, nodeBuilder);

  if (dstNodes.empty()) {
    if (!nodeBuilder.hasGeneratedNode) {
      // Auto-generate a node and enqueue it to the worklist.
      generateNode(BE, Pred->State, Pred);
    }
  }
  else {
    for (ExplodedNodeSet::iterator I = dstNodes.begin(), E = dstNodes.end();
         I != E; ++I) {
      WList->enqueue(*I);
    }
  }

  for (SmallVectorImpl<ExplodedNode*>::const_iterator
       I = nodeBuilder.sinks().begin(), E = nodeBuilder.sinks().end();
       I != E; ++I) {
    blocksExhausted.push_back(std::make_pair(L, *I));
  }
}
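
// HandleBlockEntrance - Record the visit in the per-block counter and hand
// the first CFG element of the block to the subengine, or fall through to
// HandleBlockExit if the block is empty.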
void CoreEngine::HandleBlockEntrance(const BlockEntrance& L,
                                     ExplodedNode* Pred) {

  // Increment the block counter.
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter,
                             Pred->getLocationContext()->getCurrentStackFrame(),
                             L.getBlock()->getBlockID());
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (CFGElement E = L.getFirstElement()) {
    StmtNodeBuilder Builder(L.getBlock(), 0, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement(E, Builder);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode* Pred) {

  if (const Stmt* Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        assert(false && "Analysis for this terminator not implemented.");
        break;

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

      // FIXME: Use constant-folding in CFG construction to simplify this
      // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert (B->succ_size() == 1);

        IndirectGotoNodeBuilder
          builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                  *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements. We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
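        //
        // For (2), the terminator itself is handed to HandleBranch as both
        // the branch condition and the terminator.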
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert (B->succ_size() == 1 &&
          "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt* Cond, const Stmt* Term,
                              const CFGBlock * B, ExplodedNode* Pred) {
  assert(B->succ_size() == 2);
  BranchNodeBuilder Builder(B, *(B->succ_begin()), *(B->succ_begin()+1),
                            Pred, this);
  SubEng.processBranch(Cond, Term, Builder);
}

void CoreEngine::HandlePostStmt(const CFGBlock* B, unsigned StmtIdx,
                                ExplodedNode* Pred) {
  assert (!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    StmtNodeBuilder Builder(B, StmtIdx, Pred, this,
                            SubEng.getStateManager());
    SubEng.processCFGElement((*B)[StmtIdx], Builder);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint& Loc,
                              const GRState* State, ExplodedNode* Pred) {

  bool IsNew;
  ExplodedNode* Node = G->getNode(Loc, State, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G); // Link 'Node' with its predecessor.
  else {
    assert (IsNew);
    G->addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

ExplodedNode *
GenericNodeBuilderImpl::generateNodeImpl(const GRState *state,
                                         ExplodedNode *pred,
                                         ProgramPoint programPoint,
                                         bool asSink) {

  hasGeneratedNode = true;
  bool isNew;
  ExplodedNode *node = engine.getGraph().getNode(programPoint, state, &isNew);
  if (pred)
    node->addPredecessor(pred, engine.getGraph());
  if (isNew) {
    if (asSink) {
      node->markAsSink();
      sinksGenerated.push_back(node);
    }
    return node;
  }
  return 0;
}

StmtNodeBuilder::StmtNodeBuilder(const CFGBlock* b, unsigned idx,
                                 ExplodedNode* N, CoreEngine* e,
                                 GRStateManager &mgr)
  : Eng(*e), B(*b), Idx(idx), Pred(N), Mgr(mgr),
    PurgingDeadSymbols(false), BuildSinks(false), hasGeneratedNode(false),
    PointKind(ProgramPoint::PostStmtKind), Tag(0) {
  Deferred.insert(N);
  CleanedState = Pred->getState();
}

StmtNodeBuilder::~StmtNodeBuilder() {
  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink())
      GenerateAutoTransition(*I);
}
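
// GenerateAutoTransition - Called from the destructor for each deferred,
// non-sink node: enqueue the node itself (or a fresh PostStmt successor)
// so the analysis advances to the next CFG element.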
void StmtNodeBuilder::GenerateAutoTransition(ExplodedNode* N) {
  assert (!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    Eng.WList->enqueue(N, &B, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  PostStmt Loc(getStmt(), N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    Eng.WList->enqueue(N, &B, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode* Succ = Eng.G->getNode(Loc, N->State, &IsNew);
  Succ->addPredecessor(N, *Eng.G);

  if (IsNew)
    Eng.WList->enqueue(Succ, &B, Idx+1);
}

ExplodedNode* StmtNodeBuilder::MakeNode(ExplodedNodeSet& Dst, const Stmt* S,
                                        ExplodedNode* Pred, const GRState* St,
                                        ProgramPoint::Kind K) {

  ExplodedNode* N = generateNode(S, St, Pred, K);

  if (N) {
    if (BuildSinks)
      N->markAsSink();
    else
      Dst.Add(N);
  }

  return N;
}

static ProgramPoint GetProgramPoint(const Stmt *S, ProgramPoint::Kind K,
                                    const LocationContext *LC,
                                    const void *tag) {
  switch (K) {
    default:
      assert(false && "Unhandled ProgramPoint kind");
    case ProgramPoint::PreStmtKind:
      return PreStmt(S, LC, tag);
    case ProgramPoint::PostStmtKind:
      return PostStmt(S, LC, tag);
    case ProgramPoint::PreLoadKind:
      return PreLoad(S, LC, tag);
    case ProgramPoint::PostLoadKind:
      return PostLoad(S, LC, tag);
    case ProgramPoint::PreStoreKind:
      return PreStore(S, LC, tag);
    case ProgramPoint::PostStoreKind:
      return PostStore(S, LC, tag);
    case ProgramPoint::PostLValueKind:
      return PostLValue(S, LC, tag);
    case ProgramPoint::PostPurgeDeadSymbolsKind:
      return PostPurgeDeadSymbols(S, LC, tag);
  }
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const Stmt* S, const GRState* state,
                                      ExplodedNode* Pred,
                                      ProgramPoint::Kind K,
                                      const void *tag) {

  const ProgramPoint &L = GetProgramPoint(S, K, Pred->getLocationContext(), tag);
  return generateNodeInternal(L, state, Pred);
}

ExplodedNode*
StmtNodeBuilder::generateNodeInternal(const ProgramPoint &Loc,
                                      const GRState* State,
                                      ExplodedNode* Pred) {
  bool IsNew;
  ExplodedNode* N = Eng.G->getNode(Loc, State, &IsNew);
  N->addPredecessor(Pred, *Eng.G);
  Deferred.erase(Pred);

  if (IsNew) {
    Deferred.insert(N);
    return N;
  }

  return NULL;
}

// This function generates a new ExplodedNode but not a new branch (block
// edge).
ExplodedNode* BranchNodeBuilder::generateNode(const Stmt* Condition,
                                              const GRState* State) {
  bool IsNew;

  ExplodedNode* Succ
    = Eng.G->getNode(PostCondition(Condition, Pred->getLocationContext()),
                     State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  Pred = Succ;

  if (IsNew)
    return Succ;

  return NULL;
}
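
// Generate a successor node on the requested branch, reached over the
// BlockEdge to the true or false destination block. Returns NULL if the
// branch was marked infeasible or if an equivalent node already exists.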
ExplodedNode* BranchNodeBuilder::generateNode(const GRState* State,
                                              bool branch) {

  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  bool IsNew;

  ExplodedNode* Succ =
    Eng.G->getNode(BlockEdge(Src, branch ? DstT : DstF,
                             Pred->getLocationContext()),
                   State, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (branch)
    GeneratedTrue = true;
  else
    GeneratedFalse = true;

  if (IsNew) {
    Deferred.push_back(Succ);
    return Succ;
  }

  return NULL;
}

BranchNodeBuilder::~BranchNodeBuilder() {
  if (!GeneratedTrue) generateNode(Pred->State, true);
  if (!GeneratedFalse) generateNode(Pred->State, false);

  for (DeferredTy::iterator I=Deferred.begin(), E=Deferred.end(); I!=E; ++I)
    if (!(*I)->isSink()) Eng.WList->enqueue(*I);
}


ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator& I, const GRState* St,
                                      bool isSink) {
  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);

  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {

    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator& I, const GRState* St) {

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    Eng.WList->enqueue(Succ);
    return Succ;
  }

  return NULL;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(const GRState* St, bool isSink) {

  // Get the block for the default case.
  assert (Src->succ_rbegin() != Src->succ_rend());
  CFGBlock* DefaultBlock = *Src->succ_rbegin();

  bool IsNew;

  ExplodedNode* Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (IsNew) {
    if (isSink)
      Succ->markAsSink();
    else
      Eng.WList->enqueue(Succ);

    return Succ;
  }

  return NULL;
}

EndOfFunctionNodeBuilder::~EndOfFunctionNodeBuilder() {
  // Auto-generate an EOP node if one has not been generated.
  if (!hasGeneratedNode) {
    // If we are in an inlined call, generate a CallExit node.
    if (Pred->getLocationContext()->getParent())
      GenerateCallExitNode(Pred->State);
    else
      generateNode(Pred->State);
  }
}

ExplodedNode*
EndOfFunctionNodeBuilder::generateNode(const GRState* State,
                                       ExplodedNode* P, const void *tag) {
  hasGeneratedNode = true;
  bool IsNew;

  ExplodedNode* Node = Eng.G->getNode(BlockEntrance(&B,
                               Pred->getLocationContext(), tag ? tag : Tag),
                               State, &IsNew);

  Node->addPredecessor(P ? P : Pred, *Eng.G);

  if (IsNew) {
    Eng.G->addEndOfPath(Node);
    return Node;
  }

  return NULL;
}

void EndOfFunctionNodeBuilder::GenerateCallExitNode(const GRState *state) {
  hasGeneratedNode = true;
  // Create a CallExit node and enqueue it.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  const Stmt *CE = LocCtx->getCallSite();

  // Use the callee location context.
  CallExit Loc(CE, LocCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(Pred, *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}
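
// CallEnterNodeBuilder::generateNode - Create the node that starts analysis
// of the callee. If the callee is defined in another translation unit, a
// separate AnalysisManager and ExprEngine are constructed and the callee is
// analyzed there instead of enqueuing a node on the current worklist.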
void CallEnterNodeBuilder::generateNode(const GRState *state) {
  // Check if the callee is in the same translation unit.
  if (CalleeCtx->getTranslationUnit() !=
      Pred->getLocationContext()->getTranslationUnit()) {
    // Create a new engine. We must be careful that the new engine does not
    // reference data structures owned by the old engine.

    AnalysisManager &OldMgr = Eng.SubEng.getAnalysisManager();

    // Get the callee's translation unit.
    idx::TranslationUnit *TU = CalleeCtx->getTranslationUnit();

    // Create a new AnalysisManager with components of the callee's
    // TranslationUnit.
    // The Diagnostic is actually shared when we create ASTUnits from AST
    // files.
    AnalysisManager AMgr(TU->getASTContext(), TU->getDiagnostic(),
                         OldMgr.getLangOptions(),
                         OldMgr.getPathDiagnosticClient(),
                         OldMgr.getStoreManagerCreator(),
                         OldMgr.getConstraintManagerCreator(),
                         OldMgr.getCheckerManager(),
                         OldMgr.getIndexer(),
                         OldMgr.getMaxNodes(), OldMgr.getMaxVisit(),
                         OldMgr.shouldVisualizeGraphviz(),
                         OldMgr.shouldVisualizeUbigraph(),
                         OldMgr.shouldPurgeDead(),
                         OldMgr.shouldEagerlyAssume(),
                         OldMgr.shouldTrimGraph(),
                         OldMgr.shouldInlineCall(),
                         OldMgr.getAnalysisContextManager().getUseUnoptimizedCFG(),
                         OldMgr.getAnalysisContextManager().
                             getCFGBuildOptions().AddImplicitDtors,
                         OldMgr.getAnalysisContextManager().
                             getCFGBuildOptions().AddInitializers,
                         OldMgr.shouldEagerlyTrimExplodedGraph());
    llvm::OwningPtr<TransferFuncs> TF(MakeCFRefCountTF(AMgr.getASTContext(),
                                                       /* GCEnabled */ false,
                                                       AMgr.getLangOptions()));
    // Create the new engine.
    ExprEngine NewEng(AMgr, TF.take());

    // Create the new LocationContext.
    AnalysisContext *NewAnaCtx = AMgr.getAnalysisContext(CalleeCtx->getDecl(),
                                             CalleeCtx->getTranslationUnit());
    const StackFrameContext *OldLocCtx = CalleeCtx;
    const StackFrameContext *NewLocCtx = AMgr.getStackFrame(NewAnaCtx,
                                             OldLocCtx->getParent(),
                                             OldLocCtx->getCallSite(),
                                             OldLocCtx->getCallSiteBlock(),
                                             OldLocCtx->getIndex());

    // Now create an initial state for the new engine.
    const GRState *NewState = NewEng.getStateManager().MarshalState(state,
                                                                    NewLocCtx);
    ExplodedNodeSet ReturnNodes;
    NewEng.ExecuteWorkListWithInitialState(NewLocCtx, AMgr.getMaxNodes(),
                                           NewState, ReturnNodes);
    return;
  }

  // Get the callee entry block.
  const CFGBlock *Entry = &(CalleeCtx->getCFG()->getEntry());
  assert(Entry->empty());
  assert(Entry->succ_size() == 1);

  // Get the solitary successor.
  const CFGBlock *SuccB = *(Entry->succ_begin());

  // Construct an edge representing the starting location in the callee.
  BlockEdge Loc(Entry, SuccB, CalleeCtx);

  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);

  if (isNew)
    Eng.WList->enqueue(Node);
}

void CallExitNodeBuilder::generateNode(const GRState *state) {
  // Get the callee's location context.
  const StackFrameContext *LocCtx
    = cast<StackFrameContext>(Pred->getLocationContext());
  // When exiting an implicit automatic obj dtor call, the callsite is the Stmt
  // that triggers the dtor.
  PostStmt Loc(LocCtx->getCallSite(), LocCtx->getParent());
  bool isNew;
  ExplodedNode *Node = Eng.G->getNode(Loc, state, &isNew);
  Node->addPredecessor(const_cast<ExplodedNode*>(Pred), *Eng.G);
  if (isNew)
    Eng.WList->enqueue(Node, LocCtx->getCallSiteBlock(),
                       LocCtx->getIndex() + 1);
}