//===- CoreEngine.cpp - Path-Sensitive Dataflow Engine -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/ProgramPoint.h"
#include "clang/Basic/LLVM.h"
#include "clang/StaticAnalyzer/Core/AnalyzerOptions.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/BlockCounter.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExplodedGraph.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/FunctionSummary.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/WorkList.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/PriorityQueue.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <cassert>
#include <deque>
#include <memory>
#include <utility>
#include <vector>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "CoreEngine"

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");

STATISTIC(MaxQueueSize, "Maximum size of the worklist");
STATISTIC(MaxReachableSize, "Maximum size of auxiliary worklist set");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

namespace {

class DFS : public WorkList {
  SmallVector<WorkListUnit, 20> Stack;

public:
  bool hasWork() const override {
    return !Stack.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    assert(!Stack.empty());
    const WorkListUnit &U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;

public:
  bool hasWork() const override {
    return !Queue.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    Queue.push_back(U);
  }

  WorkListUnit dequeue() override {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }
};

} // namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() = default;

std::unique_ptr<WorkList> WorkList::makeDFS() {
  return llvm::make_unique<DFS>();
}

std::unique_ptr<WorkList> WorkList::makeBFS() {
  return llvm::make_unique<BFS>();
}

namespace {

class BFSBlockDFSContents : public WorkList {
  std::deque<WorkListUnit> Queue;
  SmallVector<WorkListUnit, 20> Stack;

public:
  bool hasWork() const override {
    return !Queue.empty() || !Stack.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    if (U.getNode()->getLocation().getAs<BlockEntrance>())
      Queue.push_front(U);
    else
      Stack.push_back(U);
  }

  WorkListUnit dequeue() override {
    // Process all basic blocks to completion.
    if (!Stack.empty()) {
      const WorkListUnit &U = Stack.back();
      Stack.pop_back(); // This technically "invalidates" U, but we are fine.
      return U;
    }

    assert(!Queue.empty());
    // Don't use a const reference. The subsequent pop_front() might make it
    // unsafe.
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }
};

} // namespace

std::unique_ptr<WorkList> WorkList::makeBFSBlockDFSContents() {
  return llvm::make_unique<BFSBlockDFSContents>();
}

namespace {

class UnexploredFirstStack : public WorkList {
  /// Stack of nodes known to have statements we have not traversed yet.
  SmallVector<WorkListUnit, 20> StackUnexplored;

  /// Stack of all other nodes.
  SmallVector<WorkListUnit, 20> StackOthers;

  using BlockID = unsigned;
  using LocIdentifier = std::pair<BlockID, const StackFrameContext *>;

  llvm::DenseSet<LocIdentifier> Reachable;

public:
  bool hasWork() const override {
    return !(StackUnexplored.empty() && StackOthers.empty());
  }

  void enqueue(const WorkListUnit &U) override {
    const ExplodedNode *N = U.getNode();
    auto BE = N->getLocation().getAs<BlockEntrance>();

    if (!BE) {
      // Assume the choice of the order of the preceding block entrance was
      // correct.
      StackUnexplored.push_back(U);
    } else {
      LocIdentifier LocId = std::make_pair(
          BE->getBlock()->getBlockID(), N->getStackFrame());
      auto InsertInfo = Reachable.insert(LocId);

      if (InsertInfo.second) {
        StackUnexplored.push_back(U);
      } else {
        StackOthers.push_back(U);
      }
    }
    MaxReachableSize.updateMax(Reachable.size());
    MaxQueueSize.updateMax(StackUnexplored.size() + StackOthers.size());
  }

  WorkListUnit dequeue() override {
    // Copy the unit out before popping; returning a reference to the popped
    // element would leave it dangling.
    if (!StackUnexplored.empty()) {
      WorkListUnit U = StackUnexplored.back();
      StackUnexplored.pop_back();
      return U;
    } else {
      WorkListUnit U = StackOthers.back();
      StackOthers.pop_back();
      return U;
    }
  }
};

} // namespace

std::unique_ptr<WorkList> WorkList::makeUnexploredFirst() {
  return llvm::make_unique<UnexploredFirstStack>();
}

class UnexploredFirstPriorityQueue : public WorkList {
  using BlockID = unsigned;
  using LocIdentifier = std::pair<BlockID, const StackFrameContext *>;

  // How many times each location was visited.
  // Is signed because we negate it later in order to have a reversed
  // comparison.
  using VisitedTimesMap = llvm::DenseMap<LocIdentifier, int>;

  // Compare by number of times the location was visited first (negated
  // to prefer less often visited locations), then by insertion time (prefer
  // expanding nodes inserted sooner first).
  using QueuePriority = std::pair<int, unsigned long>;
  using QueueItem = std::pair<WorkListUnit, QueuePriority>;

  struct ExplorationComparator {
    bool operator()(const QueueItem &LHS, const QueueItem &RHS) const {
      return LHS.second < RHS.second;
    }
  };

  // Number of inserted nodes, used to emulate DFS ordering in the priority
  // queue when insertions are equal.
  unsigned long Counter = 0;

  // Number of times each location was reached.
  VisitedTimesMap NumReached;

  // The top item is the largest one.
  llvm::PriorityQueue<QueueItem, std::vector<QueueItem>, ExplorationComparator>
      queue;

public:
  bool hasWork() const override {
    return !queue.empty();
  }

  void enqueue(const WorkListUnit &U) override {
    const ExplodedNode *N = U.getNode();
    unsigned NumVisited = 0;
    if (auto BE = N->getLocation().getAs<BlockEntrance>()) {
      LocIdentifier LocId = std::make_pair(
          BE->getBlock()->getBlockID(), N->getStackFrame());
      NumVisited = NumReached[LocId]++;
    }

    queue.push(std::make_pair(U, std::make_pair(-NumVisited, ++Counter)));
  }

  WorkListUnit dequeue() override {
    QueueItem U = queue.top();
    queue.pop();
    return U.first;
  }
};

std::unique_ptr<WorkList> WorkList::makeUnexploredFirstPriorityQueue() {
  return llvm::make_unique<UnexploredFirstPriorityQueue>();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

static std::unique_ptr<WorkList> generateWorkList(AnalyzerOptions &Opts) {
  switch (Opts.getExplorationStrategy()) {
  case AnalyzerOptions::ExplorationStrategyKind::DFS:
    return WorkList::makeDFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFS:
    return WorkList::makeBFS();
  case AnalyzerOptions::ExplorationStrategyKind::BFSBlockDFSContents:
    return WorkList::makeBFSBlockDFSContents();
  case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirst:
    return WorkList::makeUnexploredFirst();
  case AnalyzerOptions::ExplorationStrategyKind::UnexploredFirstQueue:
    return WorkList::makeUnexploredFirstPriorityQueue();
  default:
    llvm_unreachable("Unexpected case");
  }
}

CoreEngine::CoreEngine(SubEngine &subengine, FunctionSummariesTy *FS,
                       AnalyzerOptions &Opts)
    : SubEng(subengine), WList(generateWorkList(Opts)),
      BCounterFactory(G.getAllocator()), FunctionSummaries(FS) {}

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {
  if (G.num_roots() == 0) { // Initialize the analysis by constructing
                            // the root if none exists.

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() && "Entry block must be empty.");

    assert(Entry->succ_size() == 1 && "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the
    // starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      InitState = SubEng.getInitialState(L);

    bool IsNew;
    ExplodedNode *Node = G.getNode(StartLoc, InitState, false, &IsNew);
    assert(IsNew);
    G.addRoot(Node);

    NodeBuilderContext BuilderCtx(*this, StartLoc.getDst(), Node);
    ExplodedNodeSet DstBegin;
    SubEng.processBeginOfFunction(BuilderCtx, Node, DstBegin, StartLoc);

    enqueue(DstBegin);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;
  // Cap our pre-reservation in the event that the user specifies
  // a very large number of maximum steps.
  const unsigned PreReservationCap = 4000000;
  if (!UnlimitedSteps)
    G.reserve(std::min(Steps, PreReservationCap));

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit &WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

void CoreEngine::dispatchWorkItem(ExplodedNode *Pred, ProgramPoint Loc,
                                  const WorkListUnit &WU) {
  // Dispatch on the location type.
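  // Block edges and entrances, as well as call-enter and call-exit points,
  // are routed to their dedicated handlers; statement-like program points
  // all funnel through HandlePostStmt in the default case below.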
  switch (Loc.getKind()) {
  case ProgramPoint::BlockEdgeKind:
    HandleBlockEdge(Loc.castAs<BlockEdge>(), Pred);
    break;

  case ProgramPoint::BlockEntranceKind:
    HandleBlockEntrance(Loc.castAs<BlockEntrance>(), Pred);
    break;

  case ProgramPoint::BlockExitKind:
    assert(false && "BlockExit locations never occur in forward analysis.");
    break;

  case ProgramPoint::CallEnterKind:
    HandleCallEnter(Loc.castAs<CallEnter>(), Pred);
    break;

  case ProgramPoint::CallExitBeginKind:
    SubEng.processCallExit(Pred);
    break;

  case ProgramPoint::EpsilonKind: {
    assert(Pred->hasSinglePred() &&
           "Assume epsilon has exactly one predecessor by construction");
    ExplodedNode *PNode = Pred->getFirstPred();
    dispatchWorkItem(Pred, PNode->getLocation(), WU);
    break;
  }
  default:
    assert(Loc.getAs<PostStmt>() ||
           Loc.getAs<PostInitializer>() ||
           Loc.getAs<PostImplicitCall>() ||
           Loc.getAs<CallExitEnd>() ||
           Loc.getAs<LoopExit>() ||
           Loc.getAs<PostAllocatorCall>());
    HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
    break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G.eop_begin(), E = G.eop_end(); I != E;
       ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
    assert(L.getLocationContext()->getCFG()->getExit().empty() &&
           "EXIT block cannot contain Stmts.");

    // Get the return statement.
    const ReturnStmt *RS = nullptr;
    if (!L.getSrc()->empty()) {
      if (Optional<CFGStmt> LastStmt = L.getSrc()->back().getAs<CFGStmt>()) {
        RS = dyn_cast<ReturnStmt>(LastStmt->getStmt());
      }
    }

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx, Pred, RS);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder, Pred);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                     ExplodedNode *Pred) {
  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
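  // If the block has at least one CFG element, hand its first element to the
  // subengine; an empty block proceeds directly to block-exit handling.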
  if (Optional<CFGElement> E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(*E, Pred, 0, &Ctx);
  } else
    HandleBlockExit(L.getBlock(), Pred);
}

void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {
  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
    default:
      llvm_unreachable("Analysis for this terminator not implemented.");

    case Stmt::CXXBindTemporaryExprClass:
      HandleCleanupTemporaryBranch(
          cast<CXXBindTemporaryExpr>(B->getTerminator().getStmt()), B, Pred);
      return;

    // Model static initializers.
    case Stmt::DeclStmtClass:
      HandleStaticInit(cast<DeclStmt>(Term), B, Pred);
      return;

    case Stmt::BinaryOperatorClass: // '&&' and '||'
      HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
      return;

    case Stmt::BinaryConditionalOperatorClass:
    case Stmt::ConditionalOperatorClass:
      HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                   Term, B, Pred);
      return;

    // FIXME: Use constant-folding in CFG construction to simplify this
    // case.

    case Stmt::ChooseExprClass:
      HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXTryStmtClass:
      // Generate a node for each of the successors.
      // Our logic for EH analysis can certainly be improved.
      for (CFGBlock::const_succ_iterator it = B->succ_begin(),
                                         et = B->succ_end(); it != et; ++it) {
        if (const CFGBlock *succ = *it) {
          generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                       Pred->State, Pred);
        }
      }
      return;

    case Stmt::DoStmtClass:
      HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::CXXForRangeStmtClass:
      HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ForStmtClass:
      HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::ContinueStmtClass:
    case Stmt::BreakStmtClass:
    case Stmt::GotoStmtClass:
      break;

    case Stmt::IfStmtClass:
      HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
      return;

    case Stmt::IndirectGotoStmtClass: {
      // Only 1 successor: the indirect goto dispatch block.
      assert(B->succ_size() == 1);

      IndirectGotoNodeBuilder
          builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                  *(B->succ_begin()), this);

      SubEng.processIndirectGoto(builder);
      return;
    }

    case Stmt::ObjCForCollectionStmtClass:
      // In the case of ObjCForCollectionStmt, it appears twice in a CFG:
      //
      //  (1) inside a basic block, which represents the binding of the
      //      'element' variable to a value.
      //  (2) in a terminator, which represents the branch.
      //
      // For (1), subengines will bind a value (i.e., 0 or 1) indicating
      // whether or not the collection contains any more elements. We cannot
      // just test to see if the element is nil because a container can
      // contain nil elements.
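      // Case (2) is handled here: the terminator itself is passed to
      // HandleBranch as the branch condition.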
      HandleBranch(Term, Term, B, Pred);
      return;

    case Stmt::SwitchStmtClass: {
      SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                this);

      SubEng.processSwitch(builder);
      return;
    }

    case Stmt::WhileStmtClass:
      HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
      return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleCallEnter(const CallEnter &CE, ExplodedNode *Pred) {
  NodeBuilderContext BuilderCtx(*this, CE.getEntry(), Pred);
  SubEng.processCallEnter(BuilderCtx, CE, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleCleanupTemporaryBranch(const CXXBindTemporaryExpr *BTE,
                                              const CFGBlock *B,
                                              ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processCleanupTemporaryBranch(BTE, Ctx, Pred, Dst, *(B->succ_begin()),
                                       *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandleStaticInit(const DeclStmt *DS, const CFGBlock *B,
                                  ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processStaticInitializer(DS, Ctx, Pred, Dst,
                                  *(B->succ_begin()), *(B->succ_begin() + 1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {
  bool IsNew;
  ExplodedNode *Node = G.getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, G); // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G.addRoot(Node); // 'Node' has no predecessor. Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (N->getLocation().getAs<CallEnter>()) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (N->getLocation().getAs<PostInitializer>() ||
      N->getLocation().getAs<PostImplicitCall>() ||
      N->getLocation().getAs<LoopExit>()) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  if (N->getLocation().getAs<EpsilonPoint>()) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  if ((*Block)[Idx].getKind() == CFGElement::NewAllocator) {
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  // At this point, we know we're processing a normal statement.
  CFGStmt CS = (*Block)[Idx].castAs<CFGStmt>();
  PostStmt Loc(CS.getStmt(), N->getLocationContext());

  if (Loc == N->getLocation().withTag(nullptr)) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx + 1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G.getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx + 1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N,
                                                    const ReturnStmt *RS) {
  // Create a CallExitBegin node and enqueue it.
  const auto *LocCtx = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx, RS);

  bool isNew;
  ExplodedNode *Node = G.getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, G);
  return isNew ? Node : nullptr;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (const auto I : Set)
    WList->enqueue(I);
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (const auto I : Set)
    enqueueStmtNode(I, Block, Idx);
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set,
                                      const ReturnStmt *RS) {
  for (auto I : Set) {
    // If we are in an inlined call, generate CallExitBegin node.
    if (I->getLocationContext()->getParent()) {
      I = generateCallExitBeginNode(I, RS);
      if (I)
        WList->enqueue(I);
    } else {
      // TODO: We should run remove dead bindings here.
      G.addEndOfPath(I);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() {}

ExplodedNode *NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G.getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return nullptr;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() {}

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (const auto I : Frontier)
      EnclosingBldr->addNodes(I);
}

void BranchNodeBuilder::anchor() {}

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return nullptr;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT : DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode *
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {
  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, I.getBlock(), Pred->getLocationContext()),
                    St, false, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);
  if (!IsNew)
    return nullptr;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode *
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return nullptr;

  bool IsNew;
  ExplodedNode *Succ =
      Eng.G.getNode(BlockEdge(Src, DefaultBlock, Pred->getLocationContext()),
                    St, IsSink, &IsNew);
  Succ->addPredecessor(Pred, Eng.G);

  if (!IsNew)
    return nullptr;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}