//==- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "CoreEngine"

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/StmtCXX.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"

#include <deque>

using namespace clang;
using namespace ento;

STATISTIC(NumSteps,
          "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
          "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
          "The # of paths explored by the analyzer.");
//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
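// DFS worklist: a LIFO stack of work-list units, so reachable states are
// explored depth-first.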
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert(!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

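// BFS worklist: a FIFO queue of work-list units, so reachable states are
// explored breadth-first.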
class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the dtor for WorkList here because it contains virtual member
// functions, and we want the code for the dtor generated in one compilation
// unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }

namespace {
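  // Hybrid worklist: basic blocks (BlockEntrance units) are explored
  // breadth-first via the queue, while the contents of the current block are
  // explored depth-first via the stack.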
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue;
    SmallVector<WorkListUnit,20> Stack;
  public:
    virtual bool hasWork() const {
      return !Queue.empty() || !Stack.empty();
    }

    virtual void enqueue(const WorkListUnit& U) {
      if (isa<BlockEntrance>(U.getNode()->getLocation()))
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    virtual WorkListUnit dequeue() {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        const WorkListUnit& U = Stack.back();
        Stack.pop_back(); // This technically "invalidates" U, but we are fine.
        return U;
      }

      assert(!Queue.empty());
      // Don't use a const reference.  The subsequent pop_front() might make it
      // unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }

    virtual bool visitItemsInWorkList(Visitor &V) {
      for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }
  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
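/// Returns true if there is still simulation state left on the worklist.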
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                 ProgramStateRef InitState) {

  // Initialize the analysis by constructing the root if none exists.
  if (G->num_roots() == 0) {
    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() &&
           "Entry block must be empty.");

    assert(Entry->succ_size() == 1 &&
           "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a steps limit.
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}

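/// Process a single unit of work by dispatching on the kind of program point
/// at which its node is located.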
void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(cast<BlockEdge>(Loc), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(cast<BlockEntrance>(Loc), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind: {
      CallEnter CEnter = cast<CallEnter>(Loc);
      if (AnalyzedCallees)
        if (const CallExpr* CE =
            dyn_cast_or_null<CallExpr>(CEnter.getCallExpr()))
          if (const Decl *CD = CE->getCalleeDecl())
            AnalyzedCallees->insert(CD);
      SubEng.processCallEnter(CEnter, Pred);
      break;
    }

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(isa<PostStmt>(Loc) ||
             isa<PostInitializer>(Loc) ||
             isa<PostImplicitCall>(Loc) ||
             isa<CallExitEnd>(Loc));
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

275 
276 bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
277                                                  unsigned Steps,
278                                                  ProgramStateRef InitState,
279                                                  ExplodedNodeSet &Dst) {
280   bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
281   for (ExplodedGraph::eop_iterator I = G->eop_begin(),
282                                    E = G->eop_end(); I != E; ++I) {
283     Dst.Add(*I);
284   }
285   return DidNotFinish;
286 }
287 
288 void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {
289 
290   const CFGBlock *Blk = L.getDst();
291   NodeBuilderContext BuilderCtx(*this, Blk, Pred);
292 
293   // Mark this block as visited.
294   const LocationContext *LC = Pred->getLocationContext();
295   FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
296                                            LC->getDecl(),
297                                            LC->getCFG()->getNumBlockIDs());
298 
299   // Check if we are entering the EXIT block.
300   if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {
301 
302     assert (L.getLocationContext()->getCFG()->getExit().size() == 0
303             && "EXIT block cannot contain Stmts.");
304 
305     // Process the final state transition.
306     SubEng.processEndOfFunction(BuilderCtx);
307 
308     // This path is done. Don't enqueue any more nodes.
309     return;
310   }
311 
312   // Call into the SubEngine to process entering the CFGBlock.
313   ExplodedNodeSet dstNodes;
314   BlockEntrance BE(Blk, Pred->getLocationContext());
315   NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
316   SubEng.processCFGBlockEntrance(L, nodeBuilder);
317 
318   // Auto-generate a node.
319   if (!nodeBuilder.hasGeneratedNodes()) {
320     nodeBuilder.generateNode(Pred->State, Pred);
321   }
322 
323   // Enqueue nodes onto the worklist.
324   enqueue(dstNodes);
325 }
326 
327 void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
328                                        ExplodedNode *Pred) {
329 
330   // Increment the block counter.
331   const LocationContext *LC = Pred->getLocationContext();
332   unsigned BlockId = L.getBlock()->getBlockID();
333   BlockCounter Counter = WList->getBlockCounter();
334   Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
335                                            BlockId);
336   WList->setBlockCounter(Counter);
337 
338   // Process the entrance of the block.
339   if (CFGElement E = L.getFirstElement()) {
340     NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
341     SubEng.processCFGElement(E, Pred, 0, &Ctx);
342   }
343   else
344     HandleBlockExit(L.getBlock(), Pred);
345 }
346 
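/// Process the end of a basic block by dispatching on its terminator, or by
/// following the block's single fall-through successor when it has no
/// terminator.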
void CoreEngine::HandleBlockExit(const CFGBlock *B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass: {
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;
      }

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // An ObjCForCollectionStmt appears twice in a CFG:
        //
        //  (1) inside a basic block, which represents the binding of the
        //      'element' variable to a value.
        //  (2) in a terminator, which represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                  this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                              const CFGBlock *B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

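/// Process the CFG element at the given index within a block, or hand off to
/// HandleBlockExit once all of the block's elements have been processed.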
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
/// and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G->getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G);  // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G->addRoot(Node);  // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

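/// Enqueue a node produced while processing a CFG element, positioning it at
/// the appropriate (usually the next) element of its block.  If the node's
/// program point is not already the PostStmt for the current element, an
/// intermediate PostStmt node is created first.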
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation()) ||
      isa<PostImplicitCall>(N->getLocation())) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (isa<EpsilonPoint>(N->getLocation())) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  const CFGStmt *CS = (*Block)[Idx].getAs<CFGStmt>();
  const Stmt *St = CS ? CS->getStmt() : 0;
  PostStmt Loc(St, N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G->getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, *G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

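/// Generate a CallExitBegin node marking the end of an inlined call and link
/// it to its predecessor.  Returns null if an equivalent node already exists
/// in the graph.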
ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N) {
  // Create a CallExitBegin node.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx);

  bool isNew;
  ExplodedNode *Node = G->getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, *G);
  return isNew ? Node : 0;
}

void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

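/// For each node reaching the end of a function, either generate a
/// CallExitBegin node to return to the caller (when inside an inlined call) or
/// record the node as the end of a fully explored path.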
void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate a CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run remove-dead-bindings here.
      G->addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}

void NodeBuilder::anchor() { }

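/// Create a node at the given program point and state, link it to its
/// predecessor, and update the builder's frontier.  Returns null if the node
/// already existed; sink nodes are not added to the frontier.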
ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G->getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, *C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return 0;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() { }

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I)
      EnclosingBldr->addNodes(*I);
}

void BranchNodeBuilder::anchor() { }

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT:DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      false, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (!IsNew)
    return 0;

  Eng.WList->enqueue(Succ);
  return Succ;
}

ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return NULL;

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}