//==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements uninitialized values analysis for source-level CFGs.
//
//===----------------------------------------------------------------------===//

#include <utility>
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/PackedVector.h"
#include "llvm/ADT/DenseMap.h"
#include "clang/AST/Decl.h"
#include "clang/Analysis/CFG.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
#include "clang/Analysis/Analyses/UninitializedValues.h"
#include "clang/Analysis/Support/SaveAndRestore.h"

using namespace clang;

static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
  if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
      !vd->isExceptionVariable() &&
      vd->getDeclContext() == dc) {
    QualType ty = vd->getType();
    return ty->isScalarType() || ty->isVectorType();
  }
  return false;
}
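
// For example (an illustrative sketch, not code used by the analysis):
//
//   void f() {
//     int x;          // tracked: a local scalar declared in this context
//     static int s;   // not tracked: static storage duration
//     int a[2];       // not tracked: neither a scalar nor a vector type
//   }
//
// Only 'x' above would be assigned a value index and analyzed.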

//===----------------------------------------------------------------------===//
// DeclToIndex: a mapping from Decls we track to value indices.
//===----------------------------------------------------------------------===//

namespace {
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}

void DeclToIndex::computeMap(const DeclContext &dc) {
  unsigned count = 0;
  DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
                                               E(dc.decls_end());
  for ( ; I != E; ++I) {
    const VarDecl *vd = *I;
    if (isTrackedVar(vd, &dc))
      map[vd] = count++;
  }
}

llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
  llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
  if (I == map.end())
    return llvm::Optional<unsigned>();
  return I->second;
}

//===----------------------------------------------------------------------===//
// CFGBlockValues: dataflow values for CFG blocks.
//===----------------------------------------------------------------------===//

// These values are defined in such a way that a merge can be done using
// a bitwise OR.
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };
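
// Merging the values for a variable along two incoming paths is a bitwise OR
// over this two-bit lattice.  For example, Initialized (01) on one path
// merged with Uninitialized (10) on another yields MayUninitialized (11),
// and merging anything with Unknown (00) leaves it unchanged.  A small
// illustrative sketch (not part of the analysis itself):
//
//   Value merged = static_cast<Value>(Initialized | Uninitialized);
//   // merged == MayUninitialized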

static bool isUninitialized(const Value v) {
  return v >= Uninitialized;
}
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}

namespace {

typedef llvm::PackedVector<Value, 2> ValueVector;
typedef std::pair<ValueVector *, ValueVector *> BVPair;
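
// A block normally uses a single ValueVector.  Blocks that evaluate a
// short-circuit '&&' or '||' keep a BVPair, one vector per outgoing edge,
// so that facts established along the true branch are not merged with
// those along the false branch (see CFGBlockValues::getValueVector).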

class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;
  ValueVector scratch;
  DeclToIndex declToIndex;

  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);
};
} // end anonymous namespace

CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
  unsigned n = cfg.getNumBlockIDs();
  if (!n)
    return;
  vals = new std::pair<ValueVector*, ValueVector*>[n];
  memset((void*)vals, 0, sizeof(*vals) * n);
}

CFGBlockValues::~CFGBlockValues() {
  unsigned n = cfg.getNumBlockIDs();
  if (n == 0)
    return;
  for (unsigned i = 0; i < n; ++i) {
    delete vals[i].first;
    delete vals[i].second;
  }
  delete [] vals;
}

void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
  declToIndex.computeMap(dc);
  scratch.resize(declToIndex.size());
}

ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
  if (!bv)
    bv = new ValueVector(declToIndex.size());
  return *bv;
}

/// This function pattern matches for a '&&' or '||' that appears at
/// the beginning of a CFGBlock that also (1) has a terminator and
/// (2) has no other elements.  If such an expression is found, it is returned.
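///
/// For example, given code like
///
///   if (a && b) { ... }
///
/// the CFG contains a block whose first element is the expression 'a && b'
/// and which joins the short-circuit path (where only 'a' was evaluated)
/// with the path where 'b' was evaluated as well; that is the kind of block
/// this function recognizes.  (Illustrative description of the expected CFG
/// shape, not a guarantee about every CFG construction.)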
static const BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
  if (block->empty())
    return 0;

  const CFGStmt *cstmt = block->front().getAs<CFGStmt>();
  if (!cstmt)
    return 0;

  const BinaryOperator *b = dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());

  if (!b || !b->isLogicalOp())
    return 0;

  if (block->pred_size() == 2) {
    if (block->getTerminatorCondition() == b) {
      if (block->succ_size() == 2)
        return b;
    }
    else if (block->size() == 1)
      return b;
  }

  return 0;
}

ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
                                            const CFGBlock *dstBlock) {
  unsigned idx = block->getBlockID();
  if (dstBlock && getLogicalOperatorInChain(block)) {
    if (*block->succ_begin() == dstBlock)
      return lazyCreate(vals[idx].first);
    assert(*(block->succ_begin()+1) == dstBlock);
    return lazyCreate(vals[idx].second);
  }

  assert(vals[idx].second == 0);
  return lazyCreate(vals[idx].first);
}

BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
                                        bool shouldLazyCreate) {
  unsigned idx = block->getBlockID();
  lazyCreate(vals[idx].first);
  if (shouldLazyCreate)
    lazyCreate(vals[idx].second);
  return vals[idx];
}

void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
                                      bool isFirst) {
  if (isFirst)
    scratch = source;
  else
    scratch |= source;
}
#if 0
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}
#endif

bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
  ValueVector &dst = getValueVector(block, 0);
  bool changed = (dst != scratch);
  if (changed)
    dst = scratch;
#if 0
  printVector(block, scratch, 0);
#endif
  return changed;
}

bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
                                        const BVPair &newVals) {
  BVPair &vals = getValueVectors(block, true);
  bool changed = *newVals.first != *vals.first ||
                 *newVals.second != *vals.second;
  *vals.first = *newVals.first;
  *vals.second = *newVals.second;
#if 0
  printVector(block, *vals.first, 1);
  printVector(block, *vals.second, 2);
#endif
  return changed;
}

void CFGBlockValues::resetScratch() {
  scratch.reset();
}

ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
  const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
  assert(idx.hasValue());
  return scratch[idx.getValue()];
}

//===----------------------------------------------------------------------===//
// Worklist: worklist for dataflow analysis.
//===----------------------------------------------------------------------===//

namespace {
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}

void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
  unsigned OldWorklistSize = worklist.size();
  for (CFGBlock::const_succ_iterator I = block->succ_begin(),
       E = block->succ_end(); I != E; ++I) {
    const CFGBlock *Successor = *I;
    if (!Successor || enqueuedBlocks[Successor->getBlockID()])
      continue;
    worklist.push_back(Successor);
    enqueuedBlocks[Successor->getBlockID()] = true;
  }
  if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
    return;

  // Rotate the newly added blocks to the start of the worklist so that it forms
  // a proper queue when we pop off the end of the worklist.
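  //
  // For example (illustrative only): if the worklist is [A] and B and C were
  // just appended, it becomes [A, B, C]; the rotate turns it into [B, C, A],
  // so the next dequeue() (which pops from the back) returns A, the block
  // enqueued earliest.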
  std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
              worklist.end());
}

const CFGBlock *DataflowWorklist::dequeue() {
  if (worklist.empty())
    return 0;
  const CFGBlock *b = worklist.back();
  worklist.pop_back();
  enqueuedBlocks[b->getBlockID()] = false;
  return b;
}

//===----------------------------------------------------------------------===//
// Transfer function for uninitialized values analysis.
//===----------------------------------------------------------------------===//

namespace {
class FindVarResult {
  const VarDecl *vd;
  const DeclRefExpr *dr;
public:
  FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}

  const DeclRefExpr *getDeclRefExpr() const { return dr; }
  const VarDecl *getDecl() const { return vd; }
};

class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  AnalysisContext &ac;
  UninitVariablesHandler *handler;

  /// The last DeclRefExpr seen when analyzing a block.  Used to
  /// cheat when detecting cases where the address of a variable is taken.
  DeclRefExpr *lastDR;

  /// The last lvalue-to-rvalue conversion of a variable whose value
  /// was uninitialized.  Normally this results in a warning, but in some
  /// cases the warning is silenced or the uninitialized value is
  /// propagated instead.
  CastExpr *lastLoad;

  /// For some expressions, we want to ignore any post-processing after
  /// visitation.
  bool skipProcessUses;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    AnalysisContext &ac,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), ac(ac), handler(handler),
      lastDR(0), lastLoad(0),
      skipProcessUses(false) {}

  void reportUninit(const DeclRefExpr *ex, const VarDecl *vd,
                    bool isAlwaysUninit);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);
  void Visit(Stmt *s);

  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  FindVarResult findBlockVarDecl(Expr *ex);

  void ProcessUses(Stmt *s = 0);
};
}

static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
  while (Ex) {
    Ex = Ex->IgnoreParenNoopCasts(C);
    if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
      if (CE->getCastKind() == CK_LValueBitCast) {
        Ex = CE->getSubExpr();
        continue;
      }
    }
    break;
  }
  return Ex;
}

void TransferFunctions::reportUninit(const DeclRefExpr *ex,
                                     const VarDecl *vd, bool isAlwaysUninit) {
  if (handler) handler->handleUseOfUninitVariable(ex, vd, isAlwaysUninit);
}

FindVarResult TransferFunctions::findBlockVarDecl(Expr *ex) {
  if (DeclRefExpr *dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
    if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
      if (isTrackedVar(vd))
        return FindVarResult(vd, dr);
  return FindVarResult(0, 0);
}

void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs) {
  // This represents an initialization of the 'element' value.
  Stmt *element = fs->getElement();
  const VarDecl *vd = 0;

  if (DeclStmt *ds = dyn_cast<DeclStmt>(element)) {
    vd = cast<VarDecl>(ds->getSingleDecl());
    if (!isTrackedVar(vd))
      vd = 0;
  } else {
    // Initialize the value of the reference variable.
    const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
    vd = res.getDecl();
  }

  if (vd)
    vals[vd] = Initialized;
}

void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
  const BlockDecl *bd = be->getBlockDecl();
  for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
        e = bd->capture_end() ; i != e; ++i) {
    const VarDecl *vd = i->getVariable();
    if (!isTrackedVar(vd))
      continue;
    if (i->isByRef()) {
      vals[vd] = Initialized;
      continue;
    }
    Value v = vals[vd];
    if (handler && isUninitialized(v))
      handler->handleUseOfUninitVariable(be, vd, isAlwaysUninit(v));
  }
}

void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
  // Record the last DeclRefExpr seen.  This is an lvalue computation.
  // We use this value later to detect whether a variable "escapes" the
  // analysis.
  if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
    if (isTrackedVar(vd)) {
      ProcessUses();
      lastDR = dr;
    }
}

void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
       DI != DE; ++DI) {
    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
      if (isTrackedVar(vd)) {
        if (Expr *init = vd->getInit()) {
          // If the initializer consists solely of a reference to itself, we
          // explicitly mark the variable as uninitialized. This allows code
          // like the following:
          //
          //   int x = x;
          //
          // to deliberately leave a variable uninitialized. Different analysis
          // clients can detect this pattern and adjust their reporting
          // appropriately, but we need to continue to analyze subsequent uses
          // of the variable.
          if (init == lastLoad) {
            const DeclRefExpr *DR
              = cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                             lastLoad->getSubExpr()));
            if (DR->getDecl() == vd) {
              // int x = x;
              // Propagate uninitialized value, but don't immediately report
              // a problem.
              vals[vd] = Uninitialized;
              lastLoad = 0;
              lastDR = 0;
              return;
            }
          }

          // All other cases: treat the new variable as initialized.
          vals[vd] = Initialized;
        }
      }
    }
  }
}

void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
  if (bo->isAssignmentOp()) {
    const FindVarResult &res = findBlockVarDecl(bo->getLHS());
    if (const VarDecl *vd = res.getDecl()) {
      ValueVector::reference val = vals[vd];
      if (isUninitialized(val)) {
        if (bo->getOpcode() != BO_Assign)
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
        else
          val = Initialized;
      }
    }
  }
}

void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
  switch (uo->getOpcode()) {
    case clang::UO_PostDec:
    case clang::UO_PostInc:
    case clang::UO_PreDec:
    case clang::UO_PreInc: {
      const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
      if (const VarDecl *vd = res.getDecl()) {
        assert(res.getDeclRefExpr() == lastDR);
        // We null out lastDR to indicate we have fully processed it
        // and we don't want the auto-value setting in Visit().
        lastDR = 0;

        ValueVector::reference val = vals[vd];
        if (isUninitialized(val))
          reportUninit(res.getDeclRefExpr(), vd, isAlwaysUninit(val));
      }
      break;
    }
    default:
      break;
  }
}

void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (const VarDecl *vd = res.getDecl()) {
      assert(res.getDeclRefExpr() == lastDR);
      if (isUninitialized(vals[vd])) {
        // Record this load of an uninitialized value.  Normally this
        // results in a warning, but we delay reporting the issue
        // in case it is wrapped in a void cast, etc.
        lastLoad = ce;
      }
    }
  }
  else if (ce->getCastKind() == CK_NoOp ||
           ce->getCastKind() == CK_LValueBitCast) {
    skipProcessUses = true;
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;
      if (lastLoad == cse->getSubExpr()) {
        // Squelch any detected load of an uninitialized value if
        // we cast it to void.
        lastLoad = 0;
        lastDR = 0;
      }
    }
  }
}

void TransferFunctions::Visit(clang::Stmt *s) {
  skipProcessUses = false;
  StmtVisitor<TransferFunctions>::Visit(s);
  if (!skipProcessUses)
    ProcessUses(s);
}

void TransferFunctions::ProcessUses(Stmt *s) {
  // This method is typically called after visiting a CFGElement statement
  // in the CFG.  We delay reporting many loads of uninitialized values
  // until this point.
  if (lastLoad) {
    // If we just visited the lvalue-to-rvalue cast, there is nothing
    // left to do.
    if (lastLoad == s)
      return;

    // If we reach here, we have seen a load of an uninitialized value
    // and it hasn't been cast to void or otherwise handled.  In this
    // situation, report the incident.
    const DeclRefExpr *DR =
      cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                   lastLoad->getSubExpr()));
    const VarDecl *VD = cast<VarDecl>(DR->getDecl());
    reportUninit(DR, VD, isAlwaysUninit(vals[VD]));
    lastLoad = 0;

    if (DR == lastDR) {
      lastDR = 0;
      return;
    }
  }

  // Any other use of 'lastDR' involves taking an lvalue of the variable.
  // In that case, the variable "escapes" the analysis.
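  //
  // For example (illustrative only):
  //
  //   int x;
  //   int *p = &x;   // 'x' escapes; from here on we conservatively treat
  //                  // it as initialized.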
  if (lastDR && lastDR != s) {
    vals[cast<VarDecl>(lastDR->getDecl())] = Initialized;
    lastDR = 0;
  }
}

//===----------------------------------------------------------------------===//
// High-level "driver" logic for uninitialized values analysis.
//===----------------------------------------------------------------------===//

static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {

  wasAnalyzed[block->getBlockID()] = true;

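  // Blocks recognized by getLogicalOperatorInChain() carry one value vector
  // per outgoing edge (see CFGBlockValues), so that facts proven along the
  // true and false branches of a short-circuit operator are not merged
  // prematurely.  For example (an illustrative sketch):
  //
  //   int x;
  //   if (cond && (x = compute())) use(x);
  //
  // On the true edge both operands were evaluated, so 'x' is initialized at
  // 'use(x)'; on the false edge it may not be.  Folding both edges into a
  // single vector would lose that distinction and produce a false positive.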
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    } else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred, block), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, ac, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  tf.ProcessUses();
  return vals.updateValueVectorWithScratch(block);
}

void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  if (vals.hasNoDeclarations())
    return;

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}

UninitVariablesHandler::~UninitVariablesHandler() {}