1 //==- UninitializedValues.cpp - Find Uninitialized Values -------*- C++ --*-==//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file implements uninitialized values analysis for source-level CFGs.
11 //
12 //===----------------------------------------------------------------------===//
13 
14 #include <utility>
15 #include "llvm/ADT/Optional.h"
16 #include "llvm/ADT/SmallVector.h"
17 #include "llvm/ADT/PackedVector.h"
18 #include "llvm/ADT/DenseMap.h"
19 #include "clang/AST/Decl.h"
20 #include "clang/Analysis/CFG.h"
21 #include "clang/Analysis/AnalysisContext.h"
22 #include "clang/Analysis/Visitors/CFGRecStmtDeclVisitor.h"
23 #include "clang/Analysis/Analyses/UninitializedValues.h"
24 #include "llvm/Support/SaveAndRestore.h"
25 
26 using namespace clang;
27 
28 static bool isTrackedVar(const VarDecl *vd, const DeclContext *dc) {
29   if (vd->isLocalVarDecl() && !vd->hasGlobalStorage() &&
30       !vd->isExceptionVariable() &&
31       vd->getDeclContext() == dc) {
32     QualType ty = vd->getType();
33     return ty->isScalarType() || ty->isVectorType();
34   }
35   return false;
36 }
37 
38 //------------------------------------------------------------------------====//
39 // DeclToIndex: a mapping from Decls we track to value indices.
40 //====------------------------------------------------------------------------//
41 
namespace {
/// Assigns each tracked VarDecl in a DeclContext a dense integer index.
/// The index addresses that variable's 2-bit slot in the per-block
/// value vectors used by the dataflow analysis.
class DeclToIndex {
  llvm::DenseMap<const VarDecl *, unsigned> map;
public:
  DeclToIndex() {}

  /// Compute the actual mapping from declarations to bits.
  void computeMap(const DeclContext &dc);

  /// Return the number of declarations in the map.
  unsigned size() const { return map.size(); }

  /// Returns the bit vector index for a given declaration.
  llvm::Optional<unsigned> getValueIndex(const VarDecl *d) const;
};
}
58 
59 void DeclToIndex::computeMap(const DeclContext &dc) {
60   unsigned count = 0;
61   DeclContext::specific_decl_iterator<VarDecl> I(dc.decls_begin()),
62                                                E(dc.decls_end());
63   for ( ; I != E; ++I) {
64     const VarDecl *vd = *I;
65     if (isTrackedVar(vd, &dc))
66       map[vd] = count++;
67   }
68 }
69 
70 llvm::Optional<unsigned> DeclToIndex::getValueIndex(const VarDecl *d) const {
71   llvm::DenseMap<const VarDecl *, unsigned>::const_iterator I = map.find(d);
72   if (I == map.end())
73     return llvm::Optional<unsigned>();
74   return I->second;
75 }
76 
77 //------------------------------------------------------------------------====//
78 // CFGBlockValues: dataflow values for CFG blocks.
79 //====------------------------------------------------------------------------//
80 
// Lattice values for each tracked variable.  The encoding is chosen so
// that merging two values at a CFG confluence point is a plain bitwise
// OR: bit 0 records "initialized on some path", bit 1 records
// "uninitialized on some path".
enum Value { Unknown = 0x0,         /* 00 */
             Initialized = 0x1,     /* 01 */
             Uninitialized = 0x2,   /* 10 */
             MayUninitialized = 0x3 /* 11 */ };

// True when the variable is uninitialized on at least one path.
static bool isUninitialized(const Value v) {
  return v == Uninitialized || v == MayUninitialized;
}

// True only when the variable is uninitialized on every path.
static bool isAlwaysUninit(const Value v) {
  return v == Uninitialized;
}
94 
namespace {

/// One 2-bit lattice Value per tracked variable.
typedef llvm::PackedVector<Value, 2> ValueVector;
/// Pair of value vectors for a block.  The second member is only used
/// by blocks that begin a logical-operator ('&&'/'||') chain and thus
/// need distinct values per outgoing edge; otherwise it stays null.
typedef std::pair<ValueVector *, ValueVector *> BVPair;

/// Owns the dataflow state of the analysis: a (lazily allocated) value
/// vector per CFG block, plus a scratch vector used while merging
/// predecessor states and applying the transfer function.
class CFGBlockValues {
  const CFG &cfg;
  BVPair *vals;           // Array indexed by CFGBlock ID.
  ValueVector scratch;    // Workspace for merges; see mergeIntoScratch().
  DeclToIndex declToIndex;

  // Allocate '*bv' on first use and return a reference to it.
  ValueVector &lazyCreate(ValueVector *&bv);
public:
  CFGBlockValues(const CFG &cfg);
  ~CFGBlockValues();

  /// Number of variables being tracked.
  unsigned getNumEntries() const { return declToIndex.size(); }

  void computeSetOfDeclarations(const DeclContext &dc);
  /// Return the value vector for 'block'.  'dstBlock' selects which
  /// per-edge vector to use when 'block' starts a logical-op chain.
  ValueVector &getValueVector(const CFGBlock *block,
                              const CFGBlock *dstBlock);

  BVPair &getValueVectors(const CFGBlock *block, bool shouldLazyCreate);

  void mergeIntoScratch(ValueVector const &source, bool isFirst);
  bool updateValueVectorWithScratch(const CFGBlock *block);
  bool updateValueVectors(const CFGBlock *block, const BVPair &newVals);

  /// True when there is nothing to analyze (no tracked variables).
  bool hasNoDeclarations() const {
    return declToIndex.size() == 0;
  }

  void resetScratch();
  ValueVector &getScratch() { return scratch; }

  ValueVector::reference operator[](const VarDecl *vd);

  /// Value of 'vd' on the edge from 'block' to 'dstBlock'.
  Value getValue(const CFGBlock *block, const CFGBlock *dstBlock,
                 const VarDecl *vd) {
    const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
    assert(idx.hasValue());
    return getValueVector(block, dstBlock)[idx.getValue()];
  }
};
} // end anonymous namespace
140 
141 CFGBlockValues::CFGBlockValues(const CFG &c) : cfg(c), vals(0) {
142   unsigned n = cfg.getNumBlockIDs();
143   if (!n)
144     return;
145   vals = new std::pair<ValueVector*, ValueVector*>[n];
146   memset((void*)vals, 0, sizeof(*vals) * n);
147 }
148 
149 CFGBlockValues::~CFGBlockValues() {
150   unsigned n = cfg.getNumBlockIDs();
151   if (n == 0)
152     return;
153   for (unsigned i = 0; i < n; ++i) {
154     delete vals[i].first;
155     delete vals[i].second;
156   }
157   delete [] vals;
158 }
159 
160 void CFGBlockValues::computeSetOfDeclarations(const DeclContext &dc) {
161   declToIndex.computeMap(dc);
162   scratch.resize(declToIndex.size());
163 }
164 
165 ValueVector &CFGBlockValues::lazyCreate(ValueVector *&bv) {
166   if (!bv)
167     bv = new ValueVector(declToIndex.size());
168   return *bv;
169 }
170 
171 /// This function pattern matches for a '&&' or '||' that appears at
172 /// the beginning of a CFGBlock that also (1) has a terminator and
173 /// (2) has no other elements.  If such an expression is found, it is returned.
174 static const BinaryOperator *getLogicalOperatorInChain(const CFGBlock *block) {
175   if (block->empty())
176     return 0;
177 
178   CFGElement front = block->front();
179   const CFGStmt *cstmt = front.getAs<CFGStmt>();
180   if (!cstmt)
181     return 0;
182 
183   const BinaryOperator *b = dyn_cast_or_null<BinaryOperator>(cstmt->getStmt());
184 
185   if (!b || !b->isLogicalOp())
186     return 0;
187 
188   if (block->pred_size() == 2) {
189     if (block->getTerminatorCondition() == b) {
190       if (block->succ_size() == 2)
191       return b;
192     }
193     else if (block->size() == 1)
194       return b;
195   }
196 
197   return 0;
198 }
199 
200 ValueVector &CFGBlockValues::getValueVector(const CFGBlock *block,
201                                             const CFGBlock *dstBlock) {
202   unsigned idx = block->getBlockID();
203   if (dstBlock && getLogicalOperatorInChain(block)) {
204     if (*block->succ_begin() == dstBlock)
205       return lazyCreate(vals[idx].first);
206     assert(*(block->succ_begin()+1) == dstBlock);
207     return lazyCreate(vals[idx].second);
208   }
209 
210   assert(vals[idx].second == 0);
211   return lazyCreate(vals[idx].first);
212 }
213 
214 BVPair &CFGBlockValues::getValueVectors(const clang::CFGBlock *block,
215                                         bool shouldLazyCreate) {
216   unsigned idx = block->getBlockID();
217   lazyCreate(vals[idx].first);
218   if (shouldLazyCreate)
219     lazyCreate(vals[idx].second);
220   return vals[idx];
221 }
222 
#if 0
// Debugging helpers (compiled out): dump a block's value vector, or a
// named vector, to stderr.  Re-enable together with the matching
// '#if 0' call sites below.
static void printVector(const CFGBlock *block, ValueVector &bv,
                        unsigned num) {

  llvm::errs() << block->getBlockID() << " :";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << " : " << num << '\n';
}

static void printVector(const char *name, ValueVector const &bv) {
  llvm::errs() << name << " : ";
  for (unsigned i = 0; i < bv.size(); ++i) {
    llvm::errs() << ' ' << bv[i];
  }
  llvm::errs() << "\n";
}
#endif
242 
243 void CFGBlockValues::mergeIntoScratch(ValueVector const &source,
244                                       bool isFirst) {
245   if (isFirst)
246     scratch = source;
247   else
248     scratch |= source;
249 }
250 
251 bool CFGBlockValues::updateValueVectorWithScratch(const CFGBlock *block) {
252   ValueVector &dst = getValueVector(block, 0);
253   bool changed = (dst != scratch);
254   if (changed)
255     dst = scratch;
256 #if 0
257   printVector(block, scratch, 0);
258 #endif
259   return changed;
260 }
261 
262 bool CFGBlockValues::updateValueVectors(const CFGBlock *block,
263                                       const BVPair &newVals) {
264   BVPair &vals = getValueVectors(block, true);
265   bool changed = *newVals.first != *vals.first ||
266                  *newVals.second != *vals.second;
267   *vals.first = *newVals.first;
268   *vals.second = *newVals.second;
269 #if 0
270   printVector(block, *vals.first, 1);
271   printVector(block, *vals.second, 2);
272 #endif
273   return changed;
274 }
275 
void CFGBlockValues::resetScratch() {
  // Clear all bits: every tracked variable reverts to 'Unknown' (0).
  scratch.reset();
}
279 
280 ValueVector::reference CFGBlockValues::operator[](const VarDecl *vd) {
281   const llvm::Optional<unsigned> &idx = declToIndex.getValueIndex(vd);
282   assert(idx.hasValue());
283   return scratch[idx.getValue()];
284 }
285 
286 //------------------------------------------------------------------------====//
287 // Worklist: worklist for dataflow analysis.
288 //====------------------------------------------------------------------------//
289 
namespace {
/// FIFO-ish worklist of CFG blocks awaiting (re-)analysis.  The bit
/// vector prevents a block from being enqueued twice at once.
class DataflowWorklist {
  SmallVector<const CFGBlock *, 20> worklist;
  llvm::BitVector enqueuedBlocks;
public:
  DataflowWorklist(const CFG &cfg) : enqueuedBlocks(cfg.getNumBlockIDs()) {}

  void enqueueSuccessors(const CFGBlock *block);
  const CFGBlock *dequeue();
};
}
301 
302 void DataflowWorklist::enqueueSuccessors(const clang::CFGBlock *block) {
303   unsigned OldWorklistSize = worklist.size();
304   for (CFGBlock::const_succ_iterator I = block->succ_begin(),
305        E = block->succ_end(); I != E; ++I) {
306     const CFGBlock *Successor = *I;
307     if (!Successor || enqueuedBlocks[Successor->getBlockID()])
308       continue;
309     worklist.push_back(Successor);
310     enqueuedBlocks[Successor->getBlockID()] = true;
311   }
312   if (OldWorklistSize == 0 || OldWorklistSize == worklist.size())
313     return;
314 
315   // Rotate the newly added blocks to the start of the worklist so that it forms
316   // a proper queue when we pop off the end of the worklist.
317   std::rotate(worklist.begin(), worklist.begin() + OldWorklistSize,
318               worklist.end());
319 }
320 
321 const CFGBlock *DataflowWorklist::dequeue() {
322   if (worklist.empty())
323     return 0;
324   const CFGBlock *b = worklist.back();
325   worklist.pop_back();
326   enqueuedBlocks[b->getBlockID()] = false;
327   return b;
328 }
329 
330 //------------------------------------------------------------------------====//
331 // Transfer function for uninitialized values analysis.
332 //====------------------------------------------------------------------------//
333 
334 namespace {
335 class FindVarResult {
336   const VarDecl *vd;
337   const DeclRefExpr *dr;
338 public:
339   FindVarResult(VarDecl *vd, DeclRefExpr *dr) : vd(vd), dr(dr) {}
340 
341   const DeclRefExpr *getDeclRefExpr() const { return dr; }
342   const VarDecl *getDecl() const { return vd; }
343 };
344 
/// Implements the per-block transfer function of the analysis: visits
/// each statement in a CFG block, updating the scratch state in 'vals'
/// and reporting uses of uninitialized variables through 'handler'
/// (when one is attached; the first dataflow pass runs with no handler).
class TransferFunctions : public StmtVisitor<TransferFunctions> {
  CFGBlockValues &vals;
  const CFG &cfg;
  const CFGBlock *block;
  AnalysisDeclContext &ac;
  UninitVariablesHandler *handler;

  /// The last DeclRefExpr seen when analyzing a block.  Used to
  /// cheat when detecting cases when the address of a variable is taken.
  DeclRefExpr *lastDR;

  /// The last lvalue-to-rvalue conversion of a variable whose value
  /// was uninitialized.  Normally this results in a warning, but it is
  /// possible to either silence the warning in some cases, or we
  /// propagate the uninitialized value.
  CastExpr *lastLoad;

  /// For some expressions, we want to ignore any post-processing after
  /// visitation.
  bool skipProcessUses;

public:
  TransferFunctions(CFGBlockValues &vals, const CFG &cfg,
                    const CFGBlock *block, AnalysisDeclContext &ac,
                    UninitVariablesHandler *handler)
    : vals(vals), cfg(cfg), block(block), ac(ac), handler(handler),
      lastDR(0), lastLoad(0),
      skipProcessUses(false) {}

  /// Report a use of 'vd' at 'ex' if its current value is uninitialized.
  void reportUse(const Expr *ex, const VarDecl *vd);

  void VisitBlockExpr(BlockExpr *be);
  void VisitDeclStmt(DeclStmt *ds);
  void VisitDeclRefExpr(DeclRefExpr *dr);
  void VisitUnaryOperator(UnaryOperator *uo);
  void VisitBinaryOperator(BinaryOperator *bo);
  void VisitCastExpr(CastExpr *ce);
  void VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs);
  void Visit(Stmt *s);

  /// True if 'vd' is tracked in the DeclContext under analysis.
  bool isTrackedVar(const VarDecl *vd) {
    return ::isTrackedVar(vd, cast<DeclContext>(ac.getDecl()));
  }

  /// Build the UninitUse describing a use of 'vd' at 'ex' whose lattice
  /// value is 'v'.  For a maybe-uninitialized use, walks the CFG
  /// backwards from the use to find the specific branches on which the
  /// variable is definitely uninitialized, so diagnostics can name them.
  UninitUse getUninitUse(const Expr *ex, const VarDecl *vd, Value v) {
    UninitUse Use(ex, isAlwaysUninit(v));

    assert(isUninitialized(v));
    if (Use.getKind() == UninitUse::Always)
      return Use;

    // If an edge which leads unconditionally to this use did not initialize
    // the variable, we can say something stronger than 'may be uninitialized':
    // we can say 'either it's used uninitialized or you have dead code'.
    //
    // We track the number of successors of a node which have been visited, and
    // visit a node once we have visited all of its successors. Only edges where
    // the variable might still be uninitialized are followed. Since a variable
    // can't transfer from being initialized to being uninitialized, this will
    // trace out the subgraph which inevitably leads to the use and does not
    // initialize the variable. We do not want to skip past loops, since their
    // non-termination might be correlated with the initialization condition.
    //
    // For example:
    //
    //         void f(bool a, bool b) {
    // block1:   int n;
    //           if (a) {
    // block2:     if (b)
    // block3:       n = 1;
    // block4:   } else if (b) {
    // block5:     while (!a) {
    // block6:       do_work(&a);
    //               n = 2;
    //             }
    //           }
    // block7:   if (a)
    // block8:     g();
    // block9:   return n;
    //         }
    //
    // Starting from the maybe-uninitialized use in block 9:
    //  * Block 7 is not visited because we have only visited one of its two
    //    successors.
    //  * Block 8 is visited because we've visited its only successor.
    // From block 8:
    //  * Block 7 is visited because we've now visited both of its successors.
    // From block 7:
    //  * Blocks 1, 2, 4, 5, and 6 are not visited because we didn't visit all
    //    of their successors (we didn't visit 4, 3, 5, 6, and 5, respectively).
    //  * Block 3 is not visited because it initializes 'n'.
    // Now the algorithm terminates, having visited blocks 7 and 8, and having
    // found the frontier is blocks 2, 4, and 5.
    //
    // 'n' is definitely uninitialized for two edges into block 7 (from blocks 2
    // and 4), so we report that any time either of those edges is taken (in
    // each case when 'b == false'), 'n' is used uninitialized.
    llvm::SmallVector<const CFGBlock*, 32> Queue;
    llvm::SmallVector<unsigned, 32> SuccsVisited(cfg.getNumBlockIDs(), 0);
    Queue.push_back(block);
    // Specify that we've already visited all successors of the starting block.
    // This has the dual purpose of ensuring we never add it to the queue, and
    // of marking it as not being a candidate element of the frontier.
    SuccsVisited[block->getBlockID()] = block->succ_size();
    while (!Queue.empty()) {
      const CFGBlock *B = Queue.back();
      Queue.pop_back();
      for (CFGBlock::const_pred_iterator I = B->pred_begin(), E = B->pred_end();
           I != E; ++I) {
        const CFGBlock *Pred = *I;
        if (vals.getValue(Pred, B, vd) == Initialized)
          // This block initializes the variable.
          continue;

        if (++SuccsVisited[Pred->getBlockID()] == Pred->succ_size())
          // All paths from this block lead to the use and don't initialize the
          // variable.
          Queue.push_back(Pred);
      }
    }

    // Scan the frontier, looking for blocks where the variable was
    // uninitialized.
    for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
      const CFGBlock *Block = *BI;
      unsigned BlockID = Block->getBlockID();
      const Stmt *Term = Block->getTerminator();
      if (SuccsVisited[BlockID] && SuccsVisited[BlockID] < Block->succ_size() &&
          Term) {
        // This block inevitably leads to the use. If we have an edge from here
        // to a post-dominator block, and the variable is uninitialized on that
        // edge, we have found a bug.
        for (CFGBlock::const_succ_iterator I = Block->succ_begin(),
             E = Block->succ_end(); I != E; ++I) {
          const CFGBlock *Succ = *I;
          if (Succ && SuccsVisited[Succ->getBlockID()] >= Succ->succ_size() &&
              vals.getValue(Block, Succ, vd) == Uninitialized) {
            // Switch cases are a special case: report the label to the caller
            // as the 'terminator', not the switch statement itself. Suppress
            // situations where no label matched: we can't be sure that's
            // possible.
            if (isa<SwitchStmt>(Term)) {
              const Stmt *Label = Succ->getLabel();
              if (!Label || !isa<SwitchCase>(Label))
                // Might not be possible.
                continue;
              UninitUse::Branch Branch;
              Branch.Terminator = Label;
              Branch.Output = 0; // Ignored.
              Use.addUninitBranch(Branch);
            } else {
              UninitUse::Branch Branch;
              Branch.Terminator = Term;
              Branch.Output = I - Block->succ_begin();
              Use.addUninitBranch(Branch);
            }
          }
        }
      }
    }

    return Use;
  }

  FindVarResult findBlockVarDecl(Expr *ex);

  void ProcessUses(Stmt *s = 0);
};
513 }
514 
515 static const Expr *stripCasts(ASTContext &C, const Expr *Ex) {
516   while (Ex) {
517     Ex = Ex->IgnoreParenNoopCasts(C);
518     if (const CastExpr *CE = dyn_cast<CastExpr>(Ex)) {
519       if (CE->getCastKind() == CK_LValueBitCast) {
520         Ex = CE->getSubExpr();
521         continue;
522       }
523     }
524     break;
525   }
526   return Ex;
527 }
528 
529 void TransferFunctions::reportUse(const Expr *ex, const VarDecl *vd) {
530   if (!handler)
531     return;
532   Value v = vals[vd];
533   if (isUninitialized(v))
534     handler->handleUseOfUninitVariable(vd, getUninitUse(ex, vd, v));
535 }
536 
537 FindVarResult TransferFunctions::findBlockVarDecl(Expr *ex) {
538   if (DeclRefExpr *dr = dyn_cast<DeclRefExpr>(ex->IgnoreParenCasts()))
539     if (VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
540       if (isTrackedVar(vd))
541         return FindVarResult(vd, dr);
542   return FindVarResult(0, 0);
543 }
544 
545 void TransferFunctions::VisitObjCForCollectionStmt(ObjCForCollectionStmt *fs) {
546   // This represents an initialization of the 'element' value.
547   Stmt *element = fs->getElement();
548   const VarDecl *vd = 0;
549 
550   if (DeclStmt *ds = dyn_cast<DeclStmt>(element)) {
551     vd = cast<VarDecl>(ds->getSingleDecl());
552     if (!isTrackedVar(vd))
553       vd = 0;
554   } else {
555     // Initialize the value of the reference variable.
556     const FindVarResult &res = findBlockVarDecl(cast<Expr>(element));
557     vd = res.getDecl();
558   }
559 
560   if (vd)
561     vals[vd] = Initialized;
562 }
563 
564 void TransferFunctions::VisitBlockExpr(BlockExpr *be) {
565   const BlockDecl *bd = be->getBlockDecl();
566   for (BlockDecl::capture_const_iterator i = bd->capture_begin(),
567         e = bd->capture_end() ; i != e; ++i) {
568     const VarDecl *vd = i->getVariable();
569     if (!isTrackedVar(vd))
570       continue;
571     if (i->isByRef()) {
572       vals[vd] = Initialized;
573       continue;
574     }
575     reportUse(be, vd);
576   }
577 }
578 
579 void TransferFunctions::VisitDeclRefExpr(DeclRefExpr *dr) {
580   // Record the last DeclRefExpr seen.  This is an lvalue computation.
581   // We use this value to later detect if a variable "escapes" the analysis.
582   if (const VarDecl *vd = dyn_cast<VarDecl>(dr->getDecl()))
583     if (isTrackedVar(vd)) {
584       ProcessUses();
585       lastDR = dr;
586     }
587 }
588 
// Handle variable declarations: a tracked variable with an initializer
// becomes Initialized, except for the self-init pattern 'int x = x',
// which deliberately propagates the uninitialized value instead.
void TransferFunctions::VisitDeclStmt(DeclStmt *ds) {
  for (DeclStmt::decl_iterator DI = ds->decl_begin(), DE = ds->decl_end();
       DI != DE; ++DI) {
    if (VarDecl *vd = dyn_cast<VarDecl>(*DI)) {
      if (isTrackedVar(vd)) {
        if (Expr *init = vd->getInit()) {
          // If the initializer consists solely of a reference to itself, we
          // explicitly mark the variable as uninitialized. This allows code
          // like the following:
          //
          //   int x = x;
          //
          // to deliberately leave a variable uninitialized. Different analysis
          // clients can detect this pattern and adjust their reporting
          // appropriately, but we need to continue to analyze subsequent uses
          // of the variable.
          if (init == lastLoad) {
            const DeclRefExpr *DR
              = cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                             lastLoad->getSubExpr()));
            if (DR->getDecl() == vd) {
              // int x = x;
              // Propagate uninitialized value, but don't immediately report
              // a problem.
              vals[vd] = Uninitialized;
              // Clear the pending load/ref so ProcessUses() doesn't also
              // report or "escape" this reference.
              lastLoad = 0;
              lastDR = 0;
              if (handler)
                handler->handleSelfInit(vd);
              return;
            }
          }

          // All other cases: treat the new variable as initialized.
          // This is a minor optimization to reduce the propagation
          // of the analysis, since we will have already reported
          // the use of the uninitialized value (when visiting the
          // initializer).
          vals[vd] = Initialized;
        }
      }
    }
  }
}
633 
634 void TransferFunctions::VisitBinaryOperator(clang::BinaryOperator *bo) {
635   if (bo->isAssignmentOp()) {
636     const FindVarResult &res = findBlockVarDecl(bo->getLHS());
637     if (const VarDecl *vd = res.getDecl()) {
638       if (bo->getOpcode() != BO_Assign)
639         reportUse(res.getDeclRefExpr(), vd);
640       else
641         vals[vd] = Initialized;
642     }
643   }
644 }
645 
646 void TransferFunctions::VisitUnaryOperator(clang::UnaryOperator *uo) {
647   switch (uo->getOpcode()) {
648     case clang::UO_PostDec:
649     case clang::UO_PostInc:
650     case clang::UO_PreDec:
651     case clang::UO_PreInc: {
652       const FindVarResult &res = findBlockVarDecl(uo->getSubExpr());
653       if (const VarDecl *vd = res.getDecl()) {
654         assert(res.getDeclRefExpr() == lastDR);
655         // We null out lastDR to indicate we have fully processed it
656         // and we don't want the auto-value setting in Visit().
657         lastDR = 0;
658         reportUse(res.getDeclRefExpr(), vd);
659       }
660       break;
661     }
662     default:
663       break;
664   }
665 }
666 
// Handle casts.  Loads (lvalue-to-rvalue) of tracked variables are
// recorded in 'lastLoad' and reported lazily by ProcessUses(), so that
// patterns such as '(void) x' or 'int x = x' can suppress or reroute
// the report.  NOTE: the else-if order matters — a C-style cast whose
// kind is CK_NoOp must take the skipProcessUses branch, not the
// CStyleCastExpr branch below it.
void TransferFunctions::VisitCastExpr(clang::CastExpr *ce) {
  if (ce->getCastKind() == CK_LValueToRValue) {
    const FindVarResult &res = findBlockVarDecl(ce->getSubExpr());
    if (res.getDecl()) {
      assert(res.getDeclRefExpr() == lastDR);
      lastLoad = ce;
    }
  }
  else if (ce->getCastKind() == CK_NoOp ||
           ce->getCastKind() == CK_LValueBitCast) {
    // Transparent casts: keep the pending lastDR/lastLoad alive.
    skipProcessUses = true;
  }
  else if (CStyleCastExpr *cse = dyn_cast<CStyleCastExpr>(ce)) {
    if (cse->getType()->isVoidType()) {
      // e.g. (void) x;
      if (lastLoad == cse->getSubExpr()) {
        // Squelch any detected load of an uninitialized value if
        // we cast it to void.
        lastLoad = 0;
        lastDR = 0;
      }
    }
  }
}
691 
692 void TransferFunctions::Visit(clang::Stmt *s) {
693   skipProcessUses = false;
694   StmtVisitor<TransferFunctions>::Visit(s);
695   if (!skipProcessUses)
696     ProcessUses(s);
697 }
698 
// Flush any pending load (lastLoad) and/or pending reference (lastDR).
// 's' is the statement just visited (or null at end of block); it is
// used to recognize when the pending item is the statement itself and
// must stay pending.
void TransferFunctions::ProcessUses(Stmt *s) {
  // This method is typically called after visiting a CFGElement statement
  // in the CFG.  We delay processing of reporting many loads of uninitialized
  // values until here.
  if (lastLoad) {
    // If we just visited the lvalue-to-rvalue cast, there is nothing
    // left to do.
    if (lastLoad == s)
      return;

    const DeclRefExpr *DR =
      cast<DeclRefExpr>(stripCasts(ac.getASTContext(),
                                   lastLoad->getSubExpr()));
    const VarDecl *VD = cast<VarDecl>(DR->getDecl());

    // If we reach here, we may have seen a load of an uninitialized value
    // and it hasn't been casted to void or otherwise handled.  In this
    // situation, report the incident.
    reportUse(DR, VD);

    lastLoad = 0;

    // The load consumed the pending DeclRefExpr; don't also treat it as
    // an escaping lvalue below.
    if (DR == lastDR) {
      lastDR = 0;
      return;
    }
  }

  // Any other uses of 'lastDR' involve taking an lvalue of variable.
  // In this case, it "escapes" the analysis.
  if (lastDR && lastDR != s) {
    vals[cast<VarDecl>(lastDR->getDecl())] = Initialized;
    lastDR = 0;
  }
}
734 
735 //------------------------------------------------------------------------====//
736 // High-level "driver" logic for uninitialized values analysis.
737 //====------------------------------------------------------------------------//
738 
// Run the transfer function over a single CFG block: merge the values
// flowing in from predecessors, then visit each statement.  Returns
// true when the block's outgoing state changed (drives the worklist).
// A null 'handler' means "pure dataflow pass, no diagnostics".
static bool runOnBlock(const CFGBlock *block, const CFG &cfg,
                       AnalysisDeclContext &ac, CFGBlockValues &vals,
                       llvm::BitVector &wasAnalyzed,
                       UninitVariablesHandler *handler = 0) {

  wasAnalyzed[block->getBlockID()] = true;

  // Special case: a block forming part of a '&&'/'||' chain keeps
  // separate values for its true and false edges, merged from the
  // per-edge vectors of its two predecessors.
  if (const BinaryOperator *b = getLogicalOperatorInChain(block)) {
    CFGBlock::const_pred_iterator itr = block->pred_begin();
    BVPair vA = vals.getValueVectors(*itr, false);
    ++itr;
    BVPair vB = vals.getValueVectors(*itr, false);

    BVPair valsAB;

    if (b->getOpcode() == BO_LAnd) {
      // Merge the 'F' bits from the first and second.
      vals.mergeIntoScratch(*(vA.second ? vA.second : vA.first), true);
      vals.mergeIntoScratch(*(vB.second ? vB.second : vB.first), false);
      valsAB.first = vA.first;
      valsAB.second = &vals.getScratch();
    } else {
      // Merge the 'T' bits from the first and second.
      assert(b->getOpcode() == BO_LOr);
      vals.mergeIntoScratch(*vA.first, true);
      vals.mergeIntoScratch(*vB.first, false);
      valsAB.first = &vals.getScratch();
      valsAB.second = vA.second ? vA.second : vA.first;
    }
    return vals.updateValueVectors(block, valsAB);
  }

  // Default behavior: merge in values of predecessor blocks.
  vals.resetScratch();
  bool isFirst = true;
  for (CFGBlock::const_pred_iterator I = block->pred_begin(),
       E = block->pred_end(); I != E; ++I) {
    const CFGBlock *pred = *I;
    if (wasAnalyzed[pred->getBlockID()]) {
      vals.mergeIntoScratch(vals.getValueVector(pred, block), isFirst);
      isFirst = false;
    }
  }
  // Apply the transfer function.
  TransferFunctions tf(vals, cfg, block, ac, handler);
  for (CFGBlock::const_iterator I = block->begin(), E = block->end();
       I != E; ++I) {
    if (const CFGStmt *cs = dyn_cast<CFGStmt>(&*I)) {
      tf.Visit(const_cast<Stmt*>(cs->getStmt()));
    }
  }
  // Flush any use still pending at the end of the block.
  tf.ProcessUses();
  return vals.updateValueVectorWithScratch(block);
}
793 
// Entry point: run the uninitialized-values analysis over 'cfg' for the
// declarations of 'dc'.  First iterates the dataflow to a fixed point
// (no diagnostics), then makes one final pass over the analyzed blocks
// with 'handler' attached to emit reports.
void clang::runUninitializedVariablesAnalysis(
    const DeclContext &dc,
    const CFG &cfg,
    AnalysisDeclContext &ac,
    UninitVariablesHandler &handler,
    UninitVariablesAnalysisStats &stats) {
  CFGBlockValues vals(cfg);
  vals.computeSetOfDeclarations(dc);
  // Nothing to do if the function declares no tracked variables.
  if (vals.hasNoDeclarations())
    return;
#if 0
  cfg.dump(dc.getParentASTContext().getLangOpts(), true);
#endif

  stats.NumVariablesAnalyzed = vals.getNumEntries();

  // Mark all variables uninitialized at the entry.
  const CFGBlock &entry = cfg.getEntry();
  for (CFGBlock::const_succ_iterator i = entry.succ_begin(),
        e = entry.succ_end(); i != e; ++i) {
    if (const CFGBlock *succ = *i) {
      ValueVector &vec = vals.getValueVector(&entry, succ);
      const unsigned n = vals.getNumEntries();
      for (unsigned j = 0; j < n ; ++j) {
        vec[j] = Uninitialized;
      }
    }
  }

  // Proceed with the worklist: iterate until no block's state changes.
  DataflowWorklist worklist(cfg);
  llvm::BitVector previouslyVisited(cfg.getNumBlockIDs());
  worklist.enqueueSuccessors(&cfg.getEntry());
  llvm::BitVector wasAnalyzed(cfg.getNumBlockIDs(), false);
  wasAnalyzed[cfg.getEntry().getBlockID()] = true;

  while (const CFGBlock *block = worklist.dequeue()) {
    // Did the block change?
    bool changed = runOnBlock(block, cfg, ac, vals, wasAnalyzed);
    ++stats.NumBlockVisits;
    if (changed || !previouslyVisited[block->getBlockID()])
      worklist.enqueueSuccessors(block);
    previouslyVisited[block->getBlockID()] = true;
  }

  // Run through the blocks one more time, and report uninitialized variables.
  for (CFG::const_iterator BI = cfg.begin(), BE = cfg.end(); BI != BE; ++BI) {
    const CFGBlock *block = *BI;
    if (wasAnalyzed[block->getBlockID()]) {
      runOnBlock(block, cfg, ac, vals, wasAnalyzed, &handler);
      ++stats.NumBlockVisits;
    }
  }
}
848 
// Out-of-line definition — presumably so this translation unit anchors
// the handler's vtable (standard LLVM practice; confirm in the header).
UninitVariablesHandler::~UninitVariablesHandler() {}
850