//===-- CoreEngine.cpp - Path-Sensitive Dataflow Engine ------------*- C++ -*-//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
//  This file defines a generic engine for intraprocedural, path-sensitive,
//  dataflow analysis via graph reachability.
//
//===----------------------------------------------------------------------===//

#define DEBUG_TYPE "CoreEngine"

#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CoreEngine.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/Expr.h"
#include "clang/AST/StmtCXX.h"
#include "llvm/Support/Casting.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Statistic.h"

using namespace clang;
using namespace ento;

STATISTIC(NumSteps,
            "The # of steps executed.");
STATISTIC(NumReachedMaxSteps,
            "The # of times we reached the max number of steps.");
STATISTIC(NumPathsExplored,
            "The # of paths explored by the analyzer.");

//===----------------------------------------------------------------------===//
// Worklist classes for exploration of reachable states.
//===----------------------------------------------------------------------===//

WorkList::Visitor::~Visitor() {}

namespace {
class DFS : public WorkList {
  SmallVector<WorkListUnit,20> Stack;
public:
  virtual bool hasWork() const {
    return !Stack.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Stack.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    assert(!Stack.empty());
    const WorkListUnit& U = Stack.back();
    Stack.pop_back(); // This technically "invalidates" U, but we are fine.
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (SmallVectorImpl<WorkListUnit>::iterator
         I = Stack.begin(), E = Stack.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

class BFS : public WorkList {
  std::deque<WorkListUnit> Queue;
public:
  virtual bool hasWork() const {
    return !Queue.empty();
  }

  virtual void enqueue(const WorkListUnit& U) {
    Queue.push_back(U);
  }

  virtual WorkListUnit dequeue() {
    WorkListUnit U = Queue.front();
    Queue.pop_front();
    return U;
  }

  virtual bool visitItemsInWorkList(Visitor &V) {
    for (std::deque<WorkListUnit>::iterator
         I = Queue.begin(), E = Queue.end(); I != E; ++I) {
      if (V.visit(*I))
        return true;
    }
    return false;
  }
};

} // end anonymous namespace

// Place the destructor for WorkList here because it contains virtual member
// functions, and we want the code for the destructor generated in one
// compilation unit.
WorkList::~WorkList() {}

WorkList *WorkList::makeDFS() { return new DFS(); }
WorkList *WorkList::makeBFS() { return new BFS(); }
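
// A minimal usage sketch of the polymorphic worklist interface (hypothetical
// client code; 'U' stands for some WorkListUnit and is illustrative only):
//
//   WorkList *WL = WorkList::makeDFS();   // or makeBFS()
//   WL->enqueue(U);
//   while (WL->hasWork()) {
//     WorkListUnit Unit = WL->dequeue();
//     // ... process Unit, enqueueing any newly reachable units ...
//   }
//   delete WL;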

namespace {
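  // A hybrid strategy: units sitting at a BlockEntrance are kept in a deque,
  // while all other units go onto a stack.  dequeue() drains the stack first,
  // so the contents of the current basic block are explored depth-first
  // before another block entrance is taken from the deque.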
  class BFSBlockDFSContents : public WorkList {
    std::deque<WorkListUnit> Queue;
    SmallVector<WorkListUnit,20> Stack;
  public:
    virtual bool hasWork() const {
      return !Queue.empty() || !Stack.empty();
    }

    virtual void enqueue(const WorkListUnit& U) {
      if (isa<BlockEntrance>(U.getNode()->getLocation()))
        Queue.push_front(U);
      else
        Stack.push_back(U);
    }

    virtual WorkListUnit dequeue() {
      // Process all basic blocks to completion.
      if (!Stack.empty()) {
        const WorkListUnit& U = Stack.back();
        Stack.pop_back(); // This technically "invalidates" U, but we are fine.
        return U;
      }

      assert(!Queue.empty());
      // Don't use a const reference.  The subsequent pop_front() might make it
      // unsafe.
      WorkListUnit U = Queue.front();
      Queue.pop_front();
      return U;
    }

    virtual bool visitItemsInWorkList(Visitor &V) {
      for (SmallVectorImpl<WorkListUnit>::iterator
           I = Stack.begin(), E = Stack.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      for (std::deque<WorkListUnit>::iterator
           I = Queue.begin(), E = Queue.end(); I != E; ++I) {
        if (V.visit(*I))
          return true;
      }
      return false;
    }
  };
} // end anonymous namespace

WorkList* WorkList::makeBFSBlockDFSContents() {
  return new BFSBlockDFSContents();
}

//===----------------------------------------------------------------------===//
// Core analysis engine.
//===----------------------------------------------------------------------===//

/// ExecuteWorkList - Run the worklist algorithm for a maximum number of steps.
bool CoreEngine::ExecuteWorkList(const LocationContext *L, unsigned Steps,
                                   ProgramStateRef InitState) {

  // Initialize the analysis by constructing the root if none exists.
  if (G->num_roots() == 0) {

    const CFGBlock *Entry = &(L->getCFG()->getEntry());

    assert(Entry->empty() &&
           "Entry block must be empty.");

    assert(Entry->succ_size() == 1 &&
           "Entry block must have 1 successor.");

    // Mark the entry block as visited.
    FunctionSummaries->markVisitedBasicBlock(Entry->getBlockID(),
                                             L->getDecl(),
                                             L->getCFG()->getNumBlockIDs());

    // Get the solitary successor.
    const CFGBlock *Succ = *(Entry->succ_begin());

    // Construct an edge representing the starting location in the function.
    BlockEdge StartLoc(Entry, Succ, L);

    // Set the current block counter to be empty.
    WList->setBlockCounter(BCounterFactory.GetEmptyCounter());

    if (!InitState)
      // Generate the root.
      generateNode(StartLoc, SubEng.getInitialState(L), 0);
    else
      generateNode(StartLoc, InitState, 0);
  }

  // Check if we have a steps limit.  A limit of 0 means "unlimited".
  bool UnlimitedSteps = Steps == 0;

  while (WList->hasWork()) {
    if (!UnlimitedSteps) {
      if (Steps == 0) {
        NumReachedMaxSteps++;
        break;
      }
      --Steps;
    }

    NumSteps++;

    const WorkListUnit& WU = WList->dequeue();

    // Set the current block counter.
    WList->setBlockCounter(WU.getBlockCounter());

    // Retrieve the node.
    ExplodedNode *Node = WU.getNode();

    dispatchWorkItem(Node, Node->getLocation(), WU);
  }
  SubEng.processEndWorklist(hasWorkRemaining());
  return WList->hasWork();
}
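
// A hedged usage sketch (hypothetical caller; the real driver lives outside
// this file): run the engine over a function's LocationContext with a step
// budget, where a budget of 0 means "no limit".
//
//   bool HasWorkLeft = Eng.ExecuteWorkList(LCtx, /*Steps=*/150000,
//                                          /*InitState=*/0);
//   // 'true' means the budget ran out before the worklist was exhausted.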

void CoreEngine::dispatchWorkItem(ExplodedNode* Pred, ProgramPoint Loc,
                                  const WorkListUnit& WU) {
  // Dispatch on the location type.
  switch (Loc.getKind()) {
    case ProgramPoint::BlockEdgeKind:
      HandleBlockEdge(cast<BlockEdge>(Loc), Pred);
      break;

    case ProgramPoint::BlockEntranceKind:
      HandleBlockEntrance(cast<BlockEntrance>(Loc), Pred);
      break;

    case ProgramPoint::BlockExitKind:
      assert(false && "BlockExit locations never occur in forward analysis.");
      break;

    case ProgramPoint::CallEnterKind: {
      CallEnter CEnter = cast<CallEnter>(Loc);
      if (AnalyzedCallees)
        if (const CallExpr* CE =
            dyn_cast_or_null<CallExpr>(CEnter.getCallExpr()))
          if (const Decl *CD = CE->getCalleeDecl())
            AnalyzedCallees->insert(CD);
      SubEng.processCallEnter(CEnter, Pred);
      break;
    }

    case ProgramPoint::CallExitBeginKind:
      SubEng.processCallExit(Pred);
      break;

    case ProgramPoint::EpsilonKind: {
      assert(Pred->hasSinglePred() &&
             "Assume epsilon has exactly one predecessor by construction");
      ExplodedNode *PNode = Pred->getFirstPred();
      dispatchWorkItem(Pred, PNode->getLocation(), WU);
      break;
    }
    default:
      assert(isa<PostStmt>(Loc) ||
             isa<PostInitializer>(Loc) ||
             isa<CallExitEnd>(Loc));
      HandlePostStmt(WU.getBlock(), WU.getIndex(), Pred);
      break;
  }
}

bool CoreEngine::ExecuteWorkListWithInitialState(const LocationContext *L,
                                                 unsigned Steps,
                                                 ProgramStateRef InitState,
                                                 ExplodedNodeSet &Dst) {
  bool DidNotFinish = ExecuteWorkList(L, Steps, InitState);
  for (ExplodedGraph::eop_iterator I = G->eop_begin(),
                                   E = G->eop_end(); I != E; ++I) {
    Dst.Add(*I);
  }
  return DidNotFinish;
}

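/// HandleBlockEdge - Process a transition along a CFG edge: mark the
/// destination block as visited, detect entry into the EXIT block (which
/// finishes the path), and otherwise let the subengine process the block
/// entrance.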
void CoreEngine::HandleBlockEdge(const BlockEdge &L, ExplodedNode *Pred) {

  const CFGBlock *Blk = L.getDst();
  NodeBuilderContext BuilderCtx(*this, Blk, Pred);

  // Mark this block as visited.
  const LocationContext *LC = Pred->getLocationContext();
  FunctionSummaries->markVisitedBasicBlock(Blk->getBlockID(),
                                           LC->getDecl(),
                                           LC->getCFG()->getNumBlockIDs());

  // Check if we are entering the EXIT block.
  if (Blk == &(L.getLocationContext()->getCFG()->getExit())) {

    assert(L.getLocationContext()->getCFG()->getExit().size() == 0 &&
           "EXIT block cannot contain Stmts.");

    // Process the final state transition.
    SubEng.processEndOfFunction(BuilderCtx);

    // This path is done. Don't enqueue any more nodes.
    return;
  }

  // Call into the SubEngine to process entering the CFGBlock.
  ExplodedNodeSet dstNodes;
  BlockEntrance BE(Blk, Pred->getLocationContext());
  NodeBuilderWithSinks nodeBuilder(Pred, dstNodes, BuilderCtx, BE);
  SubEng.processCFGBlockEntrance(L, nodeBuilder);

  // Auto-generate a node.
  if (!nodeBuilder.hasGeneratedNodes()) {
    nodeBuilder.generateNode(Pred->State, Pred);
  }

  // Enqueue nodes onto the worklist.
  enqueue(dstNodes);
}

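/// HandleBlockEntrance - Increment the per-stack-frame visit count for the
/// block, then process its first CFG element, or go straight to the block
/// exit if the block is empty.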
void CoreEngine::HandleBlockEntrance(const BlockEntrance &L,
                                       ExplodedNode *Pred) {

  // Increment the block counter.
  const LocationContext *LC = Pred->getLocationContext();
  unsigned BlockId = L.getBlock()->getBlockID();
  BlockCounter Counter = WList->getBlockCounter();
  Counter = BCounterFactory.IncrementCount(Counter, LC->getCurrentStackFrame(),
                                           BlockId);
  WList->setBlockCounter(Counter);

  // Process the entrance of the block.
  if (CFGElement E = L.getFirstElement()) {
    NodeBuilderContext Ctx(*this, L.getBlock(), Pred);
    SubEng.processCFGElement(E, Pred, 0, &Ctx);
  }
  else
    HandleBlockExit(L.getBlock(), Pred);
}

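/// HandleBlockExit - Dispatch on the block's terminator: branch-like
/// terminators go through HandleBranch, switches and indirect gotos get
/// dedicated node builders, and blocks without a terminator fall through to
/// their sole successor.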
void CoreEngine::HandleBlockExit(const CFGBlock * B, ExplodedNode *Pred) {

  if (const Stmt *Term = B->getTerminator()) {
    switch (Term->getStmtClass()) {
      default:
        llvm_unreachable("Analysis for this terminator not implemented.");

      case Stmt::BinaryOperatorClass: // '&&' and '||'
        HandleBranch(cast<BinaryOperator>(Term)->getLHS(), Term, B, Pred);
        return;

      case Stmt::BinaryConditionalOperatorClass:
      case Stmt::ConditionalOperatorClass:
        HandleBranch(cast<AbstractConditionalOperator>(Term)->getCond(),
                     Term, B, Pred);
        return;

        // FIXME: Use constant-folding in CFG construction to simplify this
        // case.

      case Stmt::ChooseExprClass:
        HandleBranch(cast<ChooseExpr>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXTryStmtClass: {
        // Generate a node for each of the successors.
        // Our logic for EH analysis can certainly be improved.
        for (CFGBlock::const_succ_iterator it = B->succ_begin(),
             et = B->succ_end(); it != et; ++it) {
          if (const CFGBlock *succ = *it) {
            generateNode(BlockEdge(B, succ, Pred->getLocationContext()),
                         Pred->State, Pred);
          }
        }
        return;
      }

      case Stmt::DoStmtClass:
        HandleBranch(cast<DoStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::CXXForRangeStmtClass:
        HandleBranch(cast<CXXForRangeStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ForStmtClass:
        HandleBranch(cast<ForStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::ContinueStmtClass:
      case Stmt::BreakStmtClass:
      case Stmt::GotoStmtClass:
        break;

      case Stmt::IfStmtClass:
        HandleBranch(cast<IfStmt>(Term)->getCond(), Term, B, Pred);
        return;

      case Stmt::IndirectGotoStmtClass: {
        // Only 1 successor: the indirect goto dispatch block.
        assert(B->succ_size() == 1);

        IndirectGotoNodeBuilder
           builder(Pred, B, cast<IndirectGotoStmt>(Term)->getTarget(),
                   *(B->succ_begin()), this);

        SubEng.processIndirectGoto(builder);
        return;
      }

      case Stmt::ObjCForCollectionStmtClass: {
        // An ObjCForCollectionStmt appears twice in a CFG:
        //
        //  (1) inside a basic block, where it represents the binding of the
        //      'element' variable to a value.
        //  (2) as a terminator, where it represents the branch.
        //
        // For (1), subengines will bind a value (i.e., 0 or 1) indicating
        // whether or not the collection contains any more elements.  We cannot
        // just test to see if the element is nil because a container can
        // contain nil elements.
        HandleBranch(Term, Term, B, Pred);
        return;
      }

      case Stmt::SwitchStmtClass: {
        SwitchNodeBuilder builder(Pred, B, cast<SwitchStmt>(Term)->getCond(),
                                    this);

        SubEng.processSwitch(builder);
        return;
      }

      case Stmt::WhileStmtClass:
        HandleBranch(cast<WhileStmt>(Term)->getCond(), Term, B, Pred);
        return;
    }
  }

  assert(B->succ_size() == 1 &&
         "Blocks with no terminator should have exactly 1 successor.");

  generateNode(BlockEdge(B, *(B->succ_begin()), Pred->getLocationContext()),
               Pred->State, Pred);
}

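/// HandleBranch - Let the subengine evaluate the branch condition against the
/// block's two successors (the 'true' and 'false' edges) and enqueue the
/// resulting frontier of nodes.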
void CoreEngine::HandleBranch(const Stmt *Cond, const Stmt *Term,
                                const CFGBlock * B, ExplodedNode *Pred) {
  assert(B->succ_size() == 2);
  NodeBuilderContext Ctx(*this, B, Pred);
  ExplodedNodeSet Dst;
  SubEng.processBranch(Cond, Term, Ctx, Pred, Dst,
                       *(B->succ_begin()), *(B->succ_begin()+1));
  // Enqueue the new frontier onto the worklist.
  enqueue(Dst);
}

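/// HandlePostStmt - Process the CFG element at StmtIdx, or handle the block
/// exit once all elements of the block have been processed.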
void CoreEngine::HandlePostStmt(const CFGBlock *B, unsigned StmtIdx,
                                  ExplodedNode *Pred) {
  assert(B);
  assert(!B->empty());

  if (StmtIdx == B->size())
    HandleBlockExit(B, Pred);
  else {
    NodeBuilderContext Ctx(*this, B, Pred);
    SubEng.processCFGElement((*B)[StmtIdx], Pred, StmtIdx, &Ctx);
  }
}

/// generateNode - Utility method to generate nodes, hook up successors,
///  and add nodes to the worklist.
void CoreEngine::generateNode(const ProgramPoint &Loc,
                              ProgramStateRef State,
                              ExplodedNode *Pred) {

  bool IsNew;
  ExplodedNode *Node = G->getNode(Loc, State, false, &IsNew);

  if (Pred)
    Node->addPredecessor(Pred, *G);  // Link 'Node' with its predecessor.
  else {
    assert(IsNew);
    G->addRoot(Node);  // 'Node' has no predecessor.  Make it a root.
  }

  // Only add 'Node' to the worklist if it was freshly generated.
  if (IsNew) WList->enqueue(Node);
}

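/// enqueueStmtNode - Enqueue a node produced while processing a statement,
/// adjusting the CFG element index according to the kind of program point so
/// the worklist resumes at the correct element.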
void CoreEngine::enqueueStmtNode(ExplodedNode *N,
                                 const CFGBlock *Block, unsigned Idx) {
  assert(Block);
  assert(!N->isSink());

  // Check if this node entered a callee.
  if (isa<CallEnter>(N->getLocation())) {
    // Still use the index of the CallExpr. It's needed to create the callee
    // StackFrameContext.
    WList->enqueue(N, Block, Idx);
    return;
  }

  // Do not create extra nodes. Move to the next CFG element.
  if (isa<PostInitializer>(N->getLocation())) {
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  if (isa<EpsilonPoint>(N->getLocation())) {
    WList->enqueue(N, Block, Idx);
    return;
  }

  const CFGStmt *CS = (*Block)[Idx].getAs<CFGStmt>();
  const Stmt *St = CS ? CS->getStmt() : 0;
  PostStmt Loc(St, N->getLocationContext());

  if (Loc == N->getLocation()) {
    // Note: 'N' should be a fresh node because otherwise it shouldn't be
    // a member of Deferred.
    WList->enqueue(N, Block, Idx+1);
    return;
  }

  bool IsNew;
  ExplodedNode *Succ = G->getNode(Loc, N->getState(), false, &IsNew);
  Succ->addPredecessor(N, *G);

  if (IsNew)
    WList->enqueue(Succ, Block, Idx+1);
}

ExplodedNode *CoreEngine::generateCallExitBeginNode(ExplodedNode *N) {
  // Create a CallExitBegin node; the caller is responsible for enqueueing it.
  const StackFrameContext *LocCtx
                         = cast<StackFrameContext>(N->getLocationContext());

  // Use the callee location context.
  CallExitBegin Loc(LocCtx);

  bool isNew;
  ExplodedNode *Node = G->getNode(Loc, N->getState(), false, &isNew);
  Node->addPredecessor(N, *G);
  return isNew ? Node : 0;
}


void CoreEngine::enqueue(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    WList->enqueue(*I);
  }
}

void CoreEngine::enqueue(ExplodedNodeSet &Set,
                         const CFGBlock *Block, unsigned Idx) {
  for (ExplodedNodeSet::iterator I = Set.begin(),
                                 E = Set.end(); I != E; ++I) {
    enqueueStmtNode(*I, Block, Idx);
  }
}

void CoreEngine::enqueueEndOfFunction(ExplodedNodeSet &Set) {
  for (ExplodedNodeSet::iterator I = Set.begin(), E = Set.end(); I != E; ++I) {
    ExplodedNode *N = *I;
    // If we are in an inlined call, generate a CallExitBegin node.
    if (N->getLocationContext()->getParent()) {
      N = generateCallExitBeginNode(N);
      if (N)
        WList->enqueue(N);
    } else {
      // TODO: We should run the remove-dead-bindings pass here.
      G->addEndOfPath(N);
      NumPathsExplored++;
    }
  }
}


void NodeBuilder::anchor() { }

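/// generateNodeImpl - Create (or find) a node for (Loc, State), link it to
/// FromN, and replace FromN with the new node in the builder's frontier.
/// Returns 0 if the node already existed; sink nodes are not added to the
/// frontier.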
ExplodedNode* NodeBuilder::generateNodeImpl(const ProgramPoint &Loc,
                                            ProgramStateRef State,
                                            ExplodedNode *FromN,
                                            bool MarkAsSink) {
  HasGeneratedNodes = true;
  bool IsNew;
  ExplodedNode *N = C.Eng.G->getNode(Loc, State, MarkAsSink, &IsNew);
  N->addPredecessor(FromN, *C.Eng.G);
  Frontier.erase(FromN);

  if (!IsNew)
    return 0;

  if (!MarkAsSink)
    Frontier.Add(N);

  return N;
}

void NodeBuilderWithSinks::anchor() { }

StmtNodeBuilder::~StmtNodeBuilder() {
  if (EnclosingBldr)
    for (ExplodedNodeSet::iterator I = Frontier.begin(),
                                   E = Frontier.end(); I != E; ++I )
      EnclosingBldr->addNodes(*I);
}

void BranchNodeBuilder::anchor() { }

ExplodedNode *BranchNodeBuilder::generateNode(ProgramStateRef State,
                                              bool branch,
                                              ExplodedNode *NodePred) {
  // If the branch has been marked infeasible, we should not generate a node.
  if (!isFeasible(branch))
    return NULL;

  ProgramPoint Loc = BlockEdge(C.Block, branch ? DstT:DstF,
                               NodePred->getLocationContext());
  ExplodedNode *Succ = generateNodeImpl(Loc, State, NodePred);
  return Succ;
}

ExplodedNode*
IndirectGotoNodeBuilder::generateNode(const iterator &I,
                                      ProgramStateRef St,
                                      bool IsSink) {
  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateCaseStmtNode(const iterator &I,
                                        ProgramStateRef St) {

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, I.getBlock(),
                                      Pred->getLocationContext()), St,
                                      false, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);
  if (!IsNew)
    return 0;

  Eng.WList->enqueue(Succ);
  return Succ;
}


ExplodedNode*
SwitchNodeBuilder::generateDefaultCaseNode(ProgramStateRef St,
                                           bool IsSink) {
  // Get the block for the default case.
  assert(Src->succ_rbegin() != Src->succ_rend());
  CFGBlock *DefaultBlock = *Src->succ_rbegin();

  // Sanity check for default blocks that are unreachable and not caught
  // by earlier stages.
  if (!DefaultBlock)
    return NULL;

  bool IsNew;
  ExplodedNode *Succ = Eng.G->getNode(BlockEdge(Src, DefaultBlock,
                                      Pred->getLocationContext()), St,
                                      IsSink, &IsNew);
  Succ->addPredecessor(Pred, *Eng.G);

  if (!IsNew)
    return 0;

  if (!IsSink)
    Eng.WList->enqueue(Succ);

  return Succ;
}