//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a generic engine for intraprocedural, path-sensitive,
// dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include <algorithm>
#include <cassert>
#include <deque>
#include <memory>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

STATISTIC(MaxQueueSize, "Maximum size of the worklist");
STATISTIC(MaxReachableSize, "Maximum size of auxiliary worklist set");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

namespace {

class DFS : public WorkList {
  SmallVector<WorkListUnit, 20> Stack;

public:
  bool hasWork() const override {
    return !Stack.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    assert(!Stack.empty());
    // Copy the unit out before popping; returning a reference to the popped
    // element would be undefined behavior.
    WorkListUnit U = Stack.back();
    Stack.pop_back();
    return U;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;

public:
  bool hasWork() const override {
    return !Queue.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    Queue.push_back(U);
  }

  WorkListUnit dequeue() override {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor to be generated in a
// single compilation unit.
WorkList::~WorkList() {}

std::unique_ptr<WorkList> WorkList::makeDFS() {
  return llvm::make_unique<DFS>();
}

std::unique_ptr<WorkList> WorkList::makeBFS() {
  return llvm::make_unique<BFS>();
}
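// For illustration, a minimal sketch of how a client could drive one of these
// worklists (the driver loop below is hypothetical; the real driver is
// CoreEngine::ExecuteWorkList later in this file):
//
//   std::unique_ptr<WorkList> WL = WorkList::makeDFS();
//   WL->enqueue(InitialUnit);              // some previously built unit
//   while (WL->hasWork()) {
//     WorkListUnit WU = WL->dequeue();
//     // ... process WU, enqueueing any newly reached nodes ...
//   }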
namespace {

class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  SmallVector<WorkListUnit, 20> Stack;

public:
  bool hasWork() const override {
    return !Queue.empty() || !Stack.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    if (U.getNode()->getLocation().getAs<BlockEntrance>())
      Queue.push_front(U);
    else
      Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    // Process all basic blocks to completion.
    if (!Stack.empty()) {
      // Copy the unit out before popping; returning a reference to the
      // popped element would be undefined behavior.
      WorkListUnit U = Stack.back();
      Stack.pop_back();
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference here: the subsequent pop_front() would
    // invalidate it.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }
};

} // end anonymous namespace

std::unique_ptr<WorkList> WorkList::makeBFSBlockDFSContents() {
  return llvm::make_unique<BFSBlockDFSContents>();
}

namespace {

class UnexploredFirstStack : public WorkList {
  /// Stack of nodes known to have statements we have not traversed yet.
  SmallVector<WorkListUnit, 20> StackUnexplored;

  /// Stack of all other nodes.
  SmallVector<WorkListUnit, 20> StackOthers;

  typedef unsigned BlockID;
  typedef std::pair<BlockID, const StackFrameContext *> LocIdentifier;
  llvm::DenseSet<LocIdentifier> Reachable;

public:
  bool hasWork() const override {
    return !(StackUnexplored.empty() && StackOthers.empty());
  }

  void enqueue(const WorkListUnit &U) override {
    const ExplodedNode *N = U.getNode();
    auto BE = N->getLocation().getAs<BlockEntrance>();

    if (!BE) {
      // Assume the choice of the order of the preceding block entrance was
      // correct.
      StackUnexplored.push_back(U);
    } else {
      LocIdentifier LocId = std::make_pair(
          BE->getBlock()->getBlockID(), N->getStackFrame());
      auto InsertInfo = Reachable.insert(LocId);

      if (InsertInfo.second) {
        StackUnexplored.push_back(U);
      } else {
        StackOthers.push_back(U);
      }
    }
    MaxReachableSize.updateMax(Reachable.size());
    MaxQueueSize.updateMax(StackUnexplored.size() + StackOthers.size());
  }

  WorkListUnit dequeue() override {
    if (!StackUnexplored.empty()) {
      // Copy the unit out before popping; returning a reference to the
      // popped element would be undefined behavior.
      WorkListUnit U = StackUnexplored.back();
      StackUnexplored.pop_back();
      return U;
    } else {
      assert(!StackOthers.empty());
      WorkListUnit U = StackOthers.back();
      StackOthers.pop_back();
      return U;
    }
  }
};

} // end anonymous namespace

std::unique_ptr<WorkList> WorkList::makeUnexploredFirst() {
  return llvm::make_unique<UnexploredFirstStack>();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case AnalyzerOptions::ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  default:
    llvm_unreachable("Unexpected case");
  }
}

CoreEngine::CoreEngine(SubEngine &subengine,
                       FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}
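// The exploration strategy is selected through the analyzer's configuration
// table. A sketch of the command-line spelling (the key and value names below
// are what getExplorationStrategy() reads as of this writing; treat the exact
// spelling as an assumption):
//
//   clang --analyze -Xclang -analyzer-config \
//       -Xclang exploration_strategy=unexplored_first test.c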
/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a step limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}
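// A minimal sketch of how this entry point is typically driven (the calling
// code below is hypothetical; in clang the analysis consumer sets this up):
//
//   CoreEngine Eng(SubEng, FS, Opts);
//   const LocationContext *LC = ...; // stack frame of the analyzed function
//   bool Incomplete = Eng.ExecuteWorkList(LC, /*Steps=*/150000,
//                                         /*InitState=*/nullptr);
//   // 'Incomplete' is true iff the step budget ran out before the worklist
//   // drained.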
void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit locations never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    SubEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().size() == 0 &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement, if any.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      if (Optional<CFGStmt> LastStmt = L.getSrc()->back().getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}
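// Note on the counter bookkeeping below: BlockCounter tracks, per
// (stack frame, CFG block) pair, how many times the block has been visited
// along the current path. Subengines compare this count against a visit
// budget to cut off unbounded loop exploration (e.g. the -analyzer-max-loop
// setting; treat the flag spelling as an assumption). Illustratively,
// entering block #2 in the same frame three times yields counts 1, 2, 3.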
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}
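// For orientation, a small worked example of the terminator dispatch in
// HandleBlockExit() below (the source is hypothetical):
//
//   if (x > 0)
//     foo();
//
// The CFG block evaluating the condition carries an IfStmt terminator, so
// HandleBlockExit() passes the condition 'x > 0' to HandleBranch() together
// with the block's two successors for the true and false edges.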
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(cast<CXXBindTemporaryExpr>(Term), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass: {
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
           et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;
    }

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
        builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                *(B->succ_begin()), this);

      SubEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), subengines will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      SubEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew)
    WList->enqueue(Node);
}
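// A sketch of the node uniquing that generateNode() above relies on (the
// call sequence is illustrative): requesting the same (ProgramPoint, State)
// pair twice yields the same node, with IsNew set only the first time, which
// is what keeps already-explored states off the worklist:
//
//   bool IsNew;
//   ExplodedNode *N1 = G.getNode(Loc, State, false, &IsNew); // IsNew: true
//   ExplodedNode *N2 = G.getNode(Loc, State, false, &IsNew); // IsNew: false
//   assert(N1 == N2);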
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const StackFrameContext *LocCtx =
      cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
       E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
       E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N, RS);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
         E = Frontier.end(); I != E; ++I)
      EnclosingBldr->addNodes(*I);
}
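// A sketch of the frontier discipline implemented by generateNodeImpl()
// above (the usage is illustrative): each transition retires the predecessor
// from the frontier set and, unless the new node is a sink or a duplicate,
// installs the successor in its place.
//
//   NodeBuilder Bldr(Pred, Dst, Ctx);
//   ExplodedNode *Succ = Bldr.generateNode(Loc, NewState, Pred);
//   // 'Pred' has left the frontier 'Dst'; 'Succ' is in it, unless it was
//   // marked as a sink or had been generated before.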
void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode *
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}