1 //== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
2 //
3 //                     The LLVM Compiler Infrastructure
4 //
5 // This file is distributed under the University of Illinois Open Source
6 // License. See LICENSE.TXT for details.
7 //
8 //===----------------------------------------------------------------------===//
9 //
10 // This file defines a basic region store model. In this model, we do have field
11 // sensitivity. But we assume nothing about the heap shape. So recursive data
12 // structures are largely ignored. Basically we do 1-limiting analysis.
13 // Parameter pointers are assumed with no aliasing. Pointee objects of
14 // parameters are created lazily.
15 //
16 //===----------------------------------------------------------------------===//
17 
18 #include "clang/AST/Attr.h"
19 #include "clang/AST/CharUnits.h"
20 #include "clang/ASTMatchers/ASTMatchFinder.h"
21 #include "clang/Analysis/Analyses/LiveVariables.h"
22 #include "clang/Analysis/AnalysisDeclContext.h"
23 #include "clang/Basic/TargetInfo.h"
24 #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
25 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
26 #include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
27 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
28 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h"
29 #include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
30 #include "llvm/ADT/ImmutableMap.h"
31 #include "llvm/ADT/Optional.h"
32 #include "llvm/Support/raw_ostream.h"
33 #include <utility>
34 
35 using namespace clang;
36 using namespace ento;
37 
38 //===----------------------------------------------------------------------===//
39 // Representation of binding keys.
40 //===----------------------------------------------------------------------===//
41 
42 namespace {
/// A key for a single binding within a cluster. A key identifies the bound
/// region either by a concrete offset from the cluster's base region or,
/// when no concrete offset can be computed, symbolically by the region
/// itself.
class BindingKey {
public:
  // A Direct binding covers the region itself; a Default binding supplies an
  // optional fallback value for a region and its subregions (see
  // RegionBindingsRef::getDefaultBinding).
  enum Kind { Default = 0x0, Direct = 0x1 };
private:
  // Extra flag bit stored next to Kind in P's low bits; set when this key's
  // offset is symbolic rather than concrete.
  enum { Symbolic = 0x2 };

  // P packs the bound region pointer with the Kind/Symbolic flag bits.
  // Data holds either the concrete offset or, for symbolic keys, the base
  // region pointer — see the two constructors below.
  llvm::PointerIntPair<const MemRegion *, 2> P;
  uint64_t Data;

  /// Create a key for a binding to region \p r, which has a symbolic offset
  /// from region \p Base.
  explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
    : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
    assert(r && Base && "Must have known regions.");
    assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
  }

  /// Create a key for a binding at \p offset from base region \p r.
  explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
    : P(r, k), Data(offset) {
    assert(r && "Must have known regions.");
    assert(getOffset() == offset && "Failed to store offset");
    assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r)) && "Not a base");
  }
public:

  bool isDirect() const { return P.getInt() & Direct; }
  bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }

  const MemRegion *getRegion() const { return P.getPointer(); }
  // Only valid for concrete-offset keys.
  uint64_t getOffset() const {
    assert(!hasSymbolicOffset());
    return Data;
  }

  // Only valid for symbolic-offset keys: returns the region the symbolic
  // offset is measured from (stored in Data by the first constructor).
  const SubRegion *getConcreteOffsetRegion() const {
    assert(hasSymbolicOffset());
    return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
  }

  const MemRegion *getBaseRegion() const {
    if (hasSymbolicOffset())
      return getConcreteOffsetRegion()->getBaseRegion();
    return getRegion()->getBaseRegion();
  }

  // Fold both the flagged region pointer and the offset/base payload into
  // the hash, mirroring operator== below.
  void Profile(llvm::FoldingSetNodeID& ID) const {
    ID.AddPointer(P.getOpaqueValue());
    ID.AddInteger(Data);
  }

  /// Build a key for region \p R, deriving the offset via R->getAsOffset().
  static BindingKey Make(const MemRegion *R, Kind k);

  // Arbitrary but stable ordering (compares the packed pointer value first,
  // then the payload).
  bool operator<(const BindingKey &X) const {
    if (P.getOpaqueValue() < X.P.getOpaqueValue())
      return true;
    if (P.getOpaqueValue() > X.P.getOpaqueValue())
      return false;
    return Data < X.Data;
  }

  bool operator==(const BindingKey &X) const {
    return P.getOpaqueValue() == X.P.getOpaqueValue() &&
           Data == X.Data;
  }

  void dump() const;
};
111 } // end anonymous namespace
112 
113 BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
114   const RegionOffset &RO = R->getAsOffset();
115   if (RO.hasSymbolicOffset())
116     return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k);
117 
118   return BindingKey(RO.getRegion(), RO.getOffset(), k);
119 }
120 
namespace llvm {
  // Pretty-print a BindingKey as "(region,offset,kind)"; the offset is
  // omitted when it is symbolic.
  static inline
  raw_ostream &operator<<(raw_ostream &os, BindingKey K) {
    os << '(' << K.getRegion();
    if (!K.hasSymbolicOffset())
      os << ',' << K.getOffset();
    os << ',' << (K.isDirect() ? "direct" : "default")
       << ')';
    return os;
  }

  // BindingKey has no non-trivial state, so tell LLVM containers they may
  // treat it as POD (e.g. relocate it with memcpy).
  template <typename T> struct isPodLike;
  template <> struct isPodLike<BindingKey> {
    static const bool value = true;
  };
} // end llvm namespace
137 
#ifndef NDEBUG
// Debugger aid: print this key to stderr via the operator<< above.
LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; }
#endif
141 
142 //===----------------------------------------------------------------------===//
143 // Actual Store type.
144 //===----------------------------------------------------------------------===//
145 
// A "cluster" groups all bindings whose keys share a base region.
typedef llvm::ImmutableMap<BindingKey, SVal>    ClusterBindings;
typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
// A single (key, value) binding within a cluster.
typedef std::pair<BindingKey, SVal> BindingPair;

// The store itself: an immutable map from base regions to their clusters.
typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
        RegionBindings;
152 
153 namespace {
/// A lightweight, cheap-to-copy view of the store used while incrementally
/// building a new set of bindings. Extends ImmutableMapRef with helpers that
/// understand the cluster structure of the store.
class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
                                 ClusterBindings> {
  // Factory used to build updated clusters; never null (both constructors
  // take a reference).
  ClusterBindings::Factory *CBFactory;

public:
  typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
          ParentTy;

  RegionBindingsRef(ClusterBindings::Factory &CBFactory,
                    const RegionBindings::TreeTy *T,
                    RegionBindings::TreeTy::Factory *F)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F),
        CBFactory(&CBFactory) {}

  RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P),
        CBFactory(&CBFactory) {}

  // Shadow the parent's add/remove so the cluster factory is threaded
  // through and a RegionBindingsRef (not the parent type) is returned.
  RegionBindingsRef add(key_type_ref K, data_type_ref D) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D),
                             *CBFactory);
  }

  RegionBindingsRef remove(key_type_ref K) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K),
                             *CBFactory);
  }

  /// Return a copy of these bindings with (K, V) added to K's cluster.
  RegionBindingsRef addBinding(BindingKey K, SVal V) const;

  RegionBindingsRef addBinding(const MemRegion *R,
                               BindingKey::Kind k, SVal V) const;

  // Look up the value bound to a single key; null if unbound. The using
  // declaration keeps the parent's cluster-level lookup visible as well.
  const SVal *lookup(BindingKey K) const;
  const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
  using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;

  RegionBindingsRef removeBinding(BindingKey K);

  RegionBindingsRef removeBinding(const MemRegion *R,
                                  BindingKey::Kind k);

  // Remove both the Direct and the Default binding of R.
  RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
  }

  Optional<SVal> getDirectBinding(const MemRegion *R) const;

  /// getDefaultBinding - Returns an SVal* representing an optional default
  ///  binding associated with a region and its subregions.
  Optional<SVal> getDefaultBinding(const MemRegion *R) const;

  /// Return the internal tree as a Store.
  Store asStore() const {
    return asImmutableMap().getRootWithoutRetain();
  }

  // Print every binding, one per line, cluster by cluster. \p nl is the
  // line terminator to emit.
  void dump(raw_ostream &OS, const char *nl) const {
   for (iterator I = begin(), E = end(); I != E; ++I) {
     const ClusterBindings &Cluster = I.getData();
     for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
          CI != CE; ++CI) {
       OS << ' ' << CI.getKey() << " : " << CI.getData() << nl;
     }
     OS << nl;
   }
  }

  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); }
};
225 } // end anonymous namespace
226 
// Read-only alias used for parameters: RegionBindingsRef is cheap to copy,
// but the const reference makes the read-only intent explicit.
typedef const RegionBindingsRef& RegionBindingsConstRef;
228 
229 Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
230   return Optional<SVal>::create(lookup(R, BindingKey::Direct));
231 }
232 
233 Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
234   return Optional<SVal>::create(lookup(R, BindingKey::Default));
235 }
236 
237 RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
238   const MemRegion *Base = K.getBaseRegion();
239 
240   const ClusterBindings *ExistingCluster = lookup(Base);
241   ClusterBindings Cluster =
242       (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());
243 
244   ClusterBindings NewCluster = CBFactory->add(Cluster, K, V);
245   return add(Base, NewCluster);
246 }
247 
248 
249 RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
250                                                 BindingKey::Kind k,
251                                                 SVal V) const {
252   return addBinding(BindingKey::Make(R, k), V);
253 }
254 
255 const SVal *RegionBindingsRef::lookup(BindingKey K) const {
256   const ClusterBindings *Cluster = lookup(K.getBaseRegion());
257   if (!Cluster)
258     return nullptr;
259   return Cluster->lookup(K);
260 }
261 
262 const SVal *RegionBindingsRef::lookup(const MemRegion *R,
263                                       BindingKey::Kind k) const {
264   return lookup(BindingKey::Make(R, k));
265 }
266 
267 RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
268   const MemRegion *Base = K.getBaseRegion();
269   const ClusterBindings *Cluster = lookup(Base);
270   if (!Cluster)
271     return *this;
272 
273   ClusterBindings NewCluster = CBFactory->remove(*Cluster, K);
274   if (NewCluster.isEmpty())
275     return remove(Base);
276   return add(Base, NewCluster);
277 }
278 
279 RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
280                                                 BindingKey::Kind k){
281   return removeBinding(BindingKey::Make(R, k));
282 }
283 
284 //===----------------------------------------------------------------------===//
285 // Fine-grained control of RegionStoreManager.
286 //===----------------------------------------------------------------------===//
287 
288 namespace {
// Tag types selecting a preset feature configuration at construction time.
struct minimal_features_tag {};
struct maximal_features_tag {};

/// Feature switches controlling how precise the region store model is.
class RegionStoreFeatures {
  bool SupportsFields;
public:
  /// Minimal preset: field sensitivity disabled.
  RegionStoreFeatures(minimal_features_tag) : SupportsFields(false) {}

  /// Maximal preset: field sensitivity enabled.
  RegionStoreFeatures(maximal_features_tag) : SupportsFields(true) {}

  /// Toggle field sensitivity after construction.
  void enableFields(bool t) { SupportsFields = t; }

  /// Whether bindings to individual record fields are modeled.
  bool supportsFields() const { return SupportsFields; }
};
305 }
306 
307 //===----------------------------------------------------------------------===//
308 // Main RegionStore logic.
309 //===----------------------------------------------------------------------===//
310 
311 namespace {
// Forward declaration; the worker is defined with the invalidation logic
// and consumed by populateWorkList below.
class invalidateRegionsWorker;

/// The field-sensitive store model. The store maps each base region to a
/// cluster of (BindingKey, SVal) bindings covering the region and its
/// subregions; heap shape is not modeled (see the file header comment).
class RegionStoreManager : public StoreManager {
public:
  const RegionStoreFeatures Features;

  // Factories backing the two immutable-map layers of the store. CBFactory
  // is mutable so clusters can be built from const query paths.
  RegionBindings::Factory RBFactory;
  mutable ClusterBindings::Factory CBFactory;

  typedef std::vector<SVal> SValListTy;
private:
  // Cache for getInterestingValues(): the interesting SVals found in each
  // LazyCompoundVal examined so far.
  typedef llvm::DenseMap<const LazyCompoundValData *,
                         SValListTy> LazyBindingsMapTy;
  LazyBindingsMapTy LazyBindingsMap;

  /// The largest number of fields a struct can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
  ///
  /// This is controlled by 'region-store-small-struct-limit' option.
  /// To disable all small-struct-dependent behavior, set the option to "0".
  unsigned SmallStructLimit;

  /// A helper used to populate the work list with the given set of
  /// regions.
  void populateWorkList(invalidateRegionsWorker &W,
                        ArrayRef<SVal> Values,
                        InvalidatedRegions *TopLevelRegions);

public:
  RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f)
    : StoreManager(mgr), Features(f),
      RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()),
      SmallStructLimit(0) {
    // Without an owning engine there are no analyzer options to read, so
    // SmallStructLimit stays 0 (the heuristic is disabled).
    if (SubEngine *Eng = StateMgr.getOwningEngine()) {
      AnalyzerOptions &Options = Eng->getAnalysisManager().options;
      SmallStructLimit =
        Options.getOptionAsInteger("region-store-small-struct-limit", 2);
    }
  }


  /// setImplicitDefaultValue - Set the default binding for the provided
  ///  MemRegion to the value implicitly defined for compound literals when
  ///  the value is not specified.
  RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B,
                                            const MemRegion *R, QualType T);

  /// ArrayToPointer - Emulates the "decay" of an array to a pointer
  ///  type.  'Array' represents the lvalue of the array being decayed
  ///  to a pointer, and the returned SVal represents the decayed
  ///  version of that lvalue (i.e., a pointer to the first element of
  ///  the array).  This is called by ExprEngine when evaluating
  ///  casts from arrays to pointers.
  SVal ArrayToPointer(Loc Array, QualType ElementTy) override;

  // The initial store is empty; bindings are added as the analysis runs.
  StoreRef getInitialStore(const LocationContext *InitLoc) override {
    return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this);
  }

  //===-------------------------------------------------------------------===//
  // Binding values to regions.
  //===-------------------------------------------------------------------===//
  RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated);

  StoreRef invalidateRegions(Store store,
                             ArrayRef<SVal> Values,
                             const Expr *E, unsigned Count,
                             const LocationContext *LCtx,
                             const CallEvent *Call,
                             InvalidatedSymbols &IS,
                             RegionAndSymbolInvalidationTraits &ITraits,
                             InvalidatedRegions *Invalidated,
                             InvalidatedRegions *InvalidatedTopLevel) override;

  bool scanReachableSymbols(Store S, const MemRegion *R,
                            ScanReachableSymbols &Callbacks) override;

  // Used by BindDefaultZero below to clear out a region's old bindings
  // before installing a new default binding.
  RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *R);

public: // Part of public interface to class.

  StoreRef Bind(Store store, Loc LV, SVal V) override {
    return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this);
  }

  RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V);

  // BindDefaultInitial is only used to initialize a region with
  // a default value.
  StoreRef BindDefaultInitial(Store store, const MemRegion *R,
                              SVal V) override {
    RegionBindingsRef B = getRegionBindings(store);
    // Use other APIs when you have to wipe the region that was initialized
    // earlier.
    assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) &&
           "Double initialization!");
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  // BindDefaultZero is used for zeroing constructors that may accidentally
  // overwrite existing bindings.
  StoreRef BindDefaultZero(Store store, const MemRegion *R) override {
    // FIXME: The offsets of empty bases can be tricky because of
    // of the so called "empty base class optimization".
    // If a base class has been optimized out
    // we should not try to create a binding, otherwise we should.
    // Unfortunately, at the moment ASTRecordLayout doesn't expose
    // the actual sizes of the empty bases
    // and trying to infer them from offsets/alignments
    // seems to be error-prone and non-trivial because of the trailing padding.
    // As a temporary mitigation we don't create bindings for empty bases.
    if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R))
      if (BR->getDecl()->isEmpty())
        return StoreRef(store, *this);

    RegionBindingsRef B = getRegionBindings(store);
    SVal V = svalBuilder.makeZeroVal(Ctx.CharTy);
    // Wipe anything previously bound under R, then install the zero default.
    B = removeSubRegionBindings(B, cast<SubRegion>(R));
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  /// Attempt to extract the fields of \p LCV and bind them to the struct region
  /// \p R.
  ///
  /// This path is used when it seems advantageous to "force" loading the values
  /// within a LazyCompoundVal to bind memberwise to the struct region, rather
  /// than using a Default binding at the base of the entire region. This is a
  /// heuristic attempting to avoid building long chains of LazyCompoundVals.
  ///
  /// \returns The updated store bindings, or \c None if binding non-lazily
  ///          would be too expensive.
  Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B,
                                                 const TypedValueRegion *R,
                                                 const RecordDecl *RD,
                                                 nonloc::LazyCompoundVal LCV);

  /// BindStruct - Bind a compound value to a structure.
  RegionBindingsRef bindStruct(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  /// BindVector - Bind a compound value to a vector.
  RegionBindingsRef bindVector(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  RegionBindingsRef bindArray(RegionBindingsConstRef B,
                              const TypedValueRegion* R,
                              SVal V);

  /// Clears out all bindings in the given region and assigns a new value
  /// as a Default binding.
  RegionBindingsRef bindAggregate(RegionBindingsConstRef B,
                                  const TypedRegion *R,
                                  SVal DefaultVal);

  /// Create a new store with the specified binding removed.
  /// \param ST the original store, that is the basis for the new store.
  /// \param L the location whose binding should be removed.
  StoreRef killBinding(Store ST, Loc L) override;

  // Manual reference counting of stores; paired with
  // decrementReferenceCount below.
  void incrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRetain();
  }

  /// If the StoreManager supports it, decrement the reference count of
  /// the specified Store object.  If the reference count hits 0, the memory
  /// associated with the object is recycled.
  void decrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRelease();
  }

  bool includedInBindings(Store store, const MemRegion *region) const override;

  /// Return the value bound to specified location in a given state.
  ///
  /// The high level logic for this method is this:
  /// getBinding (L)
  ///   if L has binding
  ///     return L's binding
  ///   else if L is in killset
  ///     return unknown
  ///   else
  ///     if L is on stack or heap
  ///       return undefined
  ///     else
  ///       return symbolic
  SVal getBinding(Store S, Loc L, QualType T) override {
    return getBinding(getRegionBindings(S), L, T);
  }

  Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
    RegionBindingsRef B = getRegionBindings(S);
    // Default bindings are always applied over a base region so look up the
    // base region's default binding, otherwise the lookup will fail when R
    // is at an offset from R->getBaseRegion().
    return B.getDefaultBinding(R->getBaseRegion());
  }

  SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());

  // Per-region-kind readers that getBinding dispatches to.
  SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);

  SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);

  SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);

  SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);

  SVal getBindingForLazySymbol(const TypedValueRegion *R);

  SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                         const TypedValueRegion *R,
                                         QualType Ty);

  SVal getLazyBinding(const SubRegion *LazyBindingRegion,
                      RegionBindingsRef LazyBinding);

  /// Get bindings for the values in a struct and return a CompoundVal, used
  /// when doing struct copy:
  /// struct s x, y;
  /// x = y;
  /// y's value is retrieved by this method.
  SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
  SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
  NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);

  /// Used to lazily generate derived symbols for bindings that are defined
  /// implicitly by default bindings in a super region.
  ///
  /// Note that callers may need to specially handle LazyCompoundVals, which
  /// are returned as is in case the caller needs to treat them differently.
  Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                  const MemRegion *superR,
                                                  const TypedValueRegion *R,
                                                  QualType Ty);

  /// Get the state and region whose binding this region \p R corresponds to.
  ///
  /// If there is no lazy binding for \p R, the returned value will have a null
  /// \c second. Note that a null pointer can represents a valid Store.
  std::pair<Store, const SubRegion *>
  findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
                  const SubRegion *originalRegion);

  /// Returns the cached set of interesting SVals contained within a lazy
  /// binding.
  ///
  /// The precise value of "interesting" is determined for the purposes of
  /// RegionStore's internal analysis. It must always contain all regions and
  /// symbols, but may omit constants and other kinds of SVal.
  const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);

  //===------------------------------------------------------------------===//
  // State pruning.
  //===------------------------------------------------------------------===//

  /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
  ///  It returns a new Store with these values removed.
  StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
                              SymbolReaper& SymReaper) override;

  //===------------------------------------------------------------------===//
  // Region "extents".
  //===------------------------------------------------------------------===//

  // FIXME: This method will soon be eliminated; see the note in Store.h.
  DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state,
                                         const MemRegion* R,
                                         QualType EleTy) override;

  //===------------------------------------------------------------------===//
  // Utility methods.
  //===------------------------------------------------------------------===//

  // Reinterpret an opaque Store pointer as the root of the bindings tree.
  RegionBindingsRef getRegionBindings(Store store) const {
    return RegionBindingsRef(CBFactory,
                             static_cast<const RegionBindings::TreeTy*>(store),
                             RBFactory.getTreeFactory());
  }

  void print(Store store, raw_ostream &Out, const char* nl,
             const char *sep) override;

  // Invoke f.HandleBinding for each Direct binding of a SubRegion, stopping
  // early if the handler returns false.
  void iterBindings(Store store, BindingsHandler& f) override {
    RegionBindingsRef B = getRegionBindings(store);
    for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        const BindingKey &K = CI.getKey();
        if (!K.isDirect())
          continue;
        if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) {
          // FIXME: Possibly incorporate the offset?
          if (!f.HandleBinding(*this, store, R, CI.getData()))
            return;
        }
      }
    }
  }
};
623 
624 } // end anonymous namespace
625 
626 //===----------------------------------------------------------------------===//
627 // RegionStore creation.
628 //===----------------------------------------------------------------------===//
629 
630 std::unique_ptr<StoreManager>
631 ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
632   RegionStoreFeatures F = maximal_features_tag();
633   return llvm::make_unique<RegionStoreManager>(StMgr, F);
634 }
635 
636 std::unique_ptr<StoreManager>
637 ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) {
638   RegionStoreFeatures F = minimal_features_tag();
639   F.enableFields(true);
640   return llvm::make_unique<RegionStoreManager>(StMgr, F);
641 }
642 
643 
644 //===----------------------------------------------------------------------===//
645 // Region Cluster analysis.
646 //===----------------------------------------------------------------------===//
647 
648 namespace {
/// Used to determine which global regions are automatically included in the
/// initial worklist of a ClusterAnalysis.
///
/// The enumerators form increasing levels of inclusiveness: none, system
/// globals only, then all globals.
enum GlobalsFilterKind {
  /// Don't include any global regions.
  GFK_None,
  /// Only include system globals.
  GFK_SystemOnly,
  /// Include all global regions.
  GFK_All
};
659 
/// CRTP base for analyses that walk binding clusters via a worklist.
///
/// DERIVED customizes the traversal by shadowing VisitAddedToCluster,
/// VisitCluster, AddToWorkList and/or includeEntireMemorySpace; all
/// customization points are reached through static_cast<DERIVED*>, so there
/// is no virtual dispatch.
template <typename DERIVED>
class ClusterAnalysis  {
protected:
  typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
  typedef const MemRegion * WorkListElement;
  typedef SmallVector<WorkListElement, 10> WorkList;

  // Clusters already enqueued; guards against processing a cluster twice.
  llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;

  WorkList WL;

  RegionStoreManager &RM;
  ASTContext &Ctx;
  SValBuilder &svalBuilder;

  // The set of bindings being analyzed.
  RegionBindingsRef B;


protected:
  const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(R);
  }

  /// Returns true if all clusters in the given memspace should be initially
  /// included in the cluster analysis. Subclasses may provide their
  /// own implementation.
  bool includeEntireMemorySpace(const MemRegion *Base) {
    return false;
  }

public:
  ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
                  RegionBindingsRef b)
      : RM(rm), Ctx(StateMgr.getContext()),
        svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}

  RegionBindingsRef getRegionBindings() const { return B; }

  // True if R's cluster has already been added to the worklist.
  bool isVisited(const MemRegion *R) {
    return Visited.count(getCluster(R));
  }

  void GenerateClusters() {
    // Scan the entire set of bindings and record the region clusters.
    for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
         RI != RE; ++RI){
      const MemRegion *Base = RI.getKey();

      const ClusterBindings &Cluster = RI.getData();
      assert(!Cluster.isEmpty() && "Empty clusters should be removed");
      static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);

      // If the base's memspace should be entirely invalidated, add the cluster
      // to the workspace up front.
      if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
        AddToWorkList(WorkListElement(Base), &Cluster);
    }
  }

  // Returns false (and does not enqueue) if the cluster was already visited.
  bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(C).second)
      return false;
    WL.push_back(E);
    return true;
  }

  // Dispatch through DERIVED so subclasses can intercept worklist additions.
  bool AddToWorkList(const MemRegion *R) {
    return static_cast<DERIVED*>(this)->AddToWorkList(R);
  }

  void RunWorkList() {
    while (!WL.empty()) {
      WorkListElement E = WL.pop_back_val();
      const MemRegion *BaseR = E;

      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR));
    }
  }

  // Default no-op hooks; DERIVED shadows these as needed.
  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}

  void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
                    bool Flag) {
    static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
  }
};
747 }
748 
749 //===----------------------------------------------------------------------===//
750 // Binding invalidation.
751 //===----------------------------------------------------------------------===//
752 
753 bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
754                                               ScanReachableSymbols &Callbacks) {
755   assert(R == R->getBaseRegion() && "Should only be called for base regions");
756   RegionBindingsRef B = getRegionBindings(S);
757   const ClusterBindings *Cluster = B.lookup(R);
758 
759   if (!Cluster)
760     return true;
761 
762   for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
763        RI != RE; ++RI) {
764     if (!Callbacks.scan(RI.getData()))
765       return false;
766   }
767 
768   return true;
769 }
770 
771 static inline bool isUnionField(const FieldRegion *FR) {
772   return FR->getDecl()->getParent()->isUnion();
773 }
774 
775 typedef SmallVector<const FieldDecl *, 8> FieldVector;
776 
777 static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
778   assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
779 
780   const MemRegion *Base = K.getConcreteOffsetRegion();
781   const MemRegion *R = K.getRegion();
782 
783   while (R != Base) {
784     if (const FieldRegion *FR = dyn_cast<FieldRegion>(R))
785       if (!isUnionField(FR))
786         Fields.push_back(FR->getDecl());
787 
788     R = cast<SubRegion>(R)->getSuperRegion();
789   }
790 }
791 
792 static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
793   assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");
794 
795   if (Fields.empty())
796     return true;
797 
798   FieldVector FieldsInBindingKey;
799   getSymbolicOffsetFields(K, FieldsInBindingKey);
800 
801   ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
802   if (Delta >= 0)
803     return std::equal(FieldsInBindingKey.begin() + Delta,
804                       FieldsInBindingKey.end(),
805                       Fields.begin());
806   else
807     return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(),
808                       Fields.begin() - Delta);
809 }
810 
/// Collects all bindings in \p Cluster that may refer to bindings within
/// \p Top.
///
/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
/// \c second is the value (an SVal).
///
/// The \p IncludeAllDefaultBindings parameter specifies whether to include
/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
/// an aggregate within a larger aggregate with a default binding.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, BindingKey TopKey,
                         bool IncludeAllDefaultBindings) {
  // For a symbolic-offset key, step down to the concrete base region and
  // remember the traversed field chain so symbolic bindings can be filtered
  // against it below.
  FieldVector FieldsInSymbolicSubregions;
  if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions);
    Top = TopKey.getConcreteOffsetRegion();
    TopKey = BindingKey::Make(Top, BindingKey::Default);
  }

  // Find the length (in bits) of the region being invalidated.
  uint64_t Length = UINT64_MAX;
  SVal Extent = Top->getExtent(SVB);
  if (Optional<nonloc::ConcreteInt> ExtentCI =
          Extent.getAs<nonloc::ConcreteInt>()) {
    const llvm::APSInt &ExtentInt = ExtentCI->getValue();
    assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
    // Extents are in bytes but region offsets are in bits. Be careful!
    Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) {
    // A bit-field has a known width in bits even without a constant extent.
    if (FR->getDecl()->isBitField())
      Length = FR->getDecl()->getBitWidthValue(SVB.getContext());
  }

  for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end();
       I != E; ++I) {
    BindingKey NextKey = I.getKey();
    if (NextKey.getRegion() == TopKey.getRegion()) {
      // FIXME: This doesn't catch the case where we're really invalidating a
      // region with a symbolic offset. Example:
      //      R: points[i].y
      //   Next: points[0].x

      if (NextKey.getOffset() > TopKey.getOffset() &&
          NextKey.getOffset() - TopKey.getOffset() < Length) {
        // Case 1: The next binding is inside the region we're invalidating.
        // Include it.
        Bindings.push_back(*I);

      } else if (NextKey.getOffset() == TopKey.getOffset()) {
        // Case 2: The next binding is at the same offset as the region we're
        // invalidating. In this case, we need to leave default bindings alone,
        // since they may be providing a default value for regions beyond what
        // we're invalidating.
        // FIXME: This is probably incorrect; consider invalidating an outer
        // struct whose first field is bound to a LazyCompoundVal.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          Bindings.push_back(*I);
      }

    } else if (NextKey.hasSymbolicOffset()) {
      const MemRegion *Base = NextKey.getConcreteOffsetRegion();
      if (Top->isSubRegionOf(Base) && Top != Base) {
        // Case 3: The next key is symbolic and we just changed something within
        // its concrete region. We don't know if the binding is still valid, so
        // we'll be conservative and include it.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) {
        // Case 4: The next key is symbolic, but we changed a known
        // super-region. In this case the binding is certainly included.
        if (BaseSR->isSubRegionOf(Top))
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      }
    }
  }
}
891 
/// Convenience overload: collects bindings that may refer to \p Top using a
/// freshly built default-kind key for \p Top.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, bool IncludeAllDefaultBindings) {
  collectSubRegionBindings(Bindings, SVB, Cluster, Top,
                           BindingKey::Make(Top, BindingKey::Default),
                           IncludeAllDefaultBindings);
}
900 
901 RegionBindingsRef
902 RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B,
903                                             const SubRegion *Top) {
904   BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default);
905   const MemRegion *ClusterHead = TopKey.getBaseRegion();
906 
907   if (Top == ClusterHead) {
908     // We can remove an entire cluster's bindings all in one go.
909     return B.remove(Top);
910   }
911 
912   const ClusterBindings *Cluster = B.lookup(ClusterHead);
913   if (!Cluster) {
914     // If we're invalidating a region with a symbolic offset, we need to make
915     // sure we don't treat the base region as uninitialized anymore.
916     if (TopKey.hasSymbolicOffset()) {
917       const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
918       return B.addBinding(Concrete, BindingKey::Default, UnknownVal());
919     }
920     return B;
921   }
922 
923   SmallVector<BindingPair, 32> Bindings;
924   collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey,
925                            /*IncludeAllDefaultBindings=*/false);
926 
927   ClusterBindingsRef Result(*Cluster, CBFactory);
928   for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(),
929                                                     E = Bindings.end();
930        I != E; ++I)
931     Result = Result.remove(I->first);
932 
933   // If we're invalidating a region with a symbolic offset, we need to make sure
934   // we don't treat the base region as uninitialized anymore.
935   // FIXME: This isn't very precise; see the example in
936   // collectSubRegionBindings.
937   if (TopKey.hasSymbolicOffset()) {
938     const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
939     Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default),
940                         UnknownVal());
941   }
942 
943   if (Result.isEmpty())
944     return B.remove(ClusterHead);
945   return B.add(ClusterHead, Result.asImmutableMap());
946 }
947 
namespace {
/// Worker that performs region invalidation on top of ClusterAnalysis: it
/// visits clusters reachable from the requested values and records the
/// symbols and regions touched along the way.
class invalidateRegionsWorker : public ClusterAnalysis<invalidateRegionsWorker>
{
  const Expr *Ex;          // Expression used to tag conjured symbols.
  unsigned Count;          // Block count, makes conjured symbols unique.
  const LocationContext *LCtx;
  InvalidatedSymbols &IS;  // Out-param: symbols touched by invalidation.
  RegionAndSymbolInvalidationTraits &ITraits;
  StoreManager::InvalidatedRegions *Regions;  // Optional out-param (may be null).
  GlobalsFilterKind GlobalsFilter;  // Which globals to invalidate up front.
public:
  invalidateRegionsWorker(RegionStoreManager &rm,
                          ProgramStateManager &stateMgr,
                          RegionBindingsRef b,
                          const Expr *ex, unsigned count,
                          const LocationContext *lctx,
                          InvalidatedSymbols &is,
                          RegionAndSymbolInvalidationTraits &ITraitsIn,
                          StoreManager::InvalidatedRegions *r,
                          GlobalsFilterKind GFK)
     : ClusterAnalysis<invalidateRegionsWorker>(rm, stateMgr, b),
       Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r),
       GlobalsFilter(GFK) {}

  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  void VisitBinding(SVal V);

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  /// Returns true if all clusters in the memory space for \p Base should
  /// be invalidated.
  bool includeEntireMemorySpace(const MemRegion *Base);

  /// Returns true if the memory space of the given region is one of the global
  /// regions specially included at the start of invalidation.
  bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
};
}
988 
989 bool invalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
990   bool doNotInvalidateSuperRegion = ITraits.hasTrait(
991       R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
992   const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
993   return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
994 }
995 
996 void invalidateRegionsWorker::VisitBinding(SVal V) {
997   // A symbol?  Mark it touched by the invalidation.
998   if (SymbolRef Sym = V.getAsSymbol())
999     IS.insert(Sym);
1000 
1001   if (const MemRegion *R = V.getAsRegion()) {
1002     AddToWorkList(R);
1003     return;
1004   }
1005 
1006   // Is it a LazyCompoundVal?  All references get invalidated as well.
1007   if (Optional<nonloc::LazyCompoundVal> LCS =
1008           V.getAs<nonloc::LazyCompoundVal>()) {
1009 
1010     const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS);
1011 
1012     for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(),
1013                                                         E = Vals.end();
1014          I != E; ++I)
1015       VisitBinding(*I);
1016 
1017     return;
1018   }
1019 }
1020 
// Invalidate the cluster rooted at baseR: visit all bound values (so
// reachable symbols/regions get invalidated too), then rebind baseR with a
// fresh conjured value appropriate for its kind/type.
void invalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
                                           const ClusterBindings *C) {

  // When contents are preserved we still traverse the bindings (to mark
  // reachable symbols/regions), but we never remove or overwrite them.
  bool PreserveRegionsContents =
      ITraits.hasTrait(baseR,
                       RegionAndSymbolInvalidationTraits::TK_PreserveContents);

  if (C) {
    for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I)
      VisitBinding(I.getData());

    // Invalidate regions contents.
    if (!PreserveRegionsContents)
      B = B.remove(baseR);
  }

  if (const auto *TO = dyn_cast<TypedValueRegion>(baseR)) {
    if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) {

      // Lambdas can affect all static local variables without explicitly
      // capturing those.
      // We invalidate all static locals referenced inside the lambda body.
      if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) {
        using namespace ast_matchers;

        const char *DeclBind = "DeclBind";
        StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr(
              to(varDecl(hasStaticStorageDuration()).bind(DeclBind)))));
        auto Matches =
            match(RefToStatic, *RD->getLambdaCallOperator()->getBody(),
                  RD->getASTContext());

        for (BoundNodes &Match : Matches) {
          auto *VD = Match.getNodeAs<VarDecl>(DeclBind);
          const VarRegion *ToInvalidate =
              RM.getRegionManager().getVarRegion(VD, LCtx);
          AddToWorkList(ToInvalidate);
        }
      }
    }
  }

  // BlockDataRegion?  If so, invalidate captured variables that are passed
  // by reference.
  if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) {
    for (BlockDataRegion::referenced_vars_iterator
         BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ;
         BI != BE; ++BI) {
      const VarRegion *VR = BI.getCapturedRegion();
      const VarDecl *VD = VR->getDecl();
      if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
        AddToWorkList(VR);
      }
      else if (Loc::isLocType(VR->getValueType())) {
        // Map the current bindings to a Store to retrieve the value
        // of the binding.  If that binding itself is a region, we should
        // invalidate that region.  This is because a block may capture
        // a pointer value, but the thing pointed by that pointer may
        // get invalidated.
        SVal V = RM.getBinding(B, loc::MemRegionVal(VR));
        if (Optional<Loc> L = V.getAs<Loc>()) {
          if (const MemRegion *LR = L->getAsRegion())
            AddToWorkList(LR);
        }
      }
    }
    return;
  }

  // Symbolic region?
  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR))
    IS.insert(SR->getSymbol());

  // Nothing else should be done in the case when we preserve regions context.
  if (PreserveRegionsContents)
    return;

  // Otherwise, we have a normal data region. Record that we touched the region.
  if (Regions)
    Regions->push_back(baseR);

  if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V =
      svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (!baseR->isBoundable())
    return;

  const TypedValueRegion *TR = cast<TypedValueRegion>(baseR);
  QualType T = TR->getValueType();

  if (isInitiallyIncludedGlobalRegion(baseR)) {
    // If the region is a global and we are invalidating all globals,
    // erasing the entry is good enough.  This causes all globals to be lazily
    // symbolicated from the same base symbol.
    return;
  }

  if (T->isRecordType()) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                          Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
    bool doNotInvalidateSuperRegion = ITraits.hasTrait(
        baseR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);

    if (doNotInvalidateSuperRegion) {
      // We are not doing blank invalidation of the whole array region so we
      // have to manually invalidate each elements.
      Optional<uint64_t> NumElements;

      // Compute lower and upper offsets for region within array.
      if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT))
        NumElements = CAT->getSize().getZExtValue();
      if (!NumElements) // We are not dealing with a constant size array
        goto conjure_default;
      QualType ElementTy = AT->getElementType();
      uint64_t ElemSize = Ctx.getTypeSize(ElementTy);
      const RegionOffset &RO = baseR->getAsOffset();
      const MemRegion *SuperR = baseR->getBaseRegion();
      if (RO.hasSymbolicOffset()) {
        // If base region has a symbolic offset,
        // we revert to invalidating the super region.
        if (SuperR)
          AddToWorkList(SuperR);
        goto conjure_default;
      }

      uint64_t LowerOffset = RO.getOffset();
      uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
      bool UpperOverflow = UpperOffset < LowerOffset;

      // Invalidate regions which are within array boundaries,
      // or have a symbolic offset.
      if (!SuperR)
        goto conjure_default;

      const ClusterBindings *C = B.lookup(SuperR);
      if (!C)
        goto conjure_default;

      for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E;
           ++I) {
        const BindingKey &BK = I.getKey();
        Optional<uint64_t> ROffset =
            BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset();

        // Check offset is not symbolic and within array's boundaries.
        // Handles arrays of 0 elements and of 0-sized elements as well.
        if (!ROffset ||
            ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
             (UpperOverflow &&
              (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
             (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
          B = B.removeBinding(I.getKey());
          // Bound symbolic regions need to be invalidated for dead symbol
          // detection.
          SVal V = I.getData();
          const MemRegion *R = V.getAsRegion();
          if (R && isa<SymbolicRegion>(R))
            VisitBinding(V);
        }
      }
    }
  conjure_default:
      // Set the default value of the array to conjured symbol.
    DefinedOrUnknownSVal V =
    svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                     AT->getElementType(), Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  // Fallback for all other boundable typed regions: conjure a fresh symbol of
  // the region's value type and bind it directly.
  DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                        T,Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(baseR, BindingKey::Direct, V);
}
1210 
1211 bool invalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
1212     const MemRegion *R) {
1213   switch (GlobalsFilter) {
1214   case GFK_None:
1215     return false;
1216   case GFK_SystemOnly:
1217     return isa<GlobalSystemSpaceRegion>(R->getMemorySpace());
1218   case GFK_All:
1219     return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace());
1220   }
1221 
1222   llvm_unreachable("unknown globals filter");
1223 }
1224 
1225 bool invalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
1226   if (isInitiallyIncludedGlobalRegion(Base))
1227     return true;
1228 
1229   const MemSpaceRegion *MemSpace = Base->getMemorySpace();
1230   return ITraits.hasTrait(MemSpace,
1231                           RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
1232 }
1233 
1234 RegionBindingsRef
1235 RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K,
1236                                            const Expr *Ex,
1237                                            unsigned Count,
1238                                            const LocationContext *LCtx,
1239                                            RegionBindingsRef B,
1240                                            InvalidatedRegions *Invalidated) {
1241   // Bind the globals memory space to a new symbol that we will use to derive
1242   // the bindings for all globals.
1243   const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
1244   SVal V = svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx,
1245                                         /* type does not matter */ Ctx.IntTy,
1246                                         Count);
1247 
1248   B = B.removeBinding(GS)
1249        .addBinding(BindingKey::Make(GS, BindingKey::Default), V);
1250 
1251   // Even if there are no bindings in the global scope, we still need to
1252   // record that we touched it.
1253   if (Invalidated)
1254     Invalidated->push_back(GS);
1255 
1256   return B;
1257 }
1258 
1259 void RegionStoreManager::populateWorkList(invalidateRegionsWorker &W,
1260                                           ArrayRef<SVal> Values,
1261                                           InvalidatedRegions *TopLevelRegions) {
1262   for (ArrayRef<SVal>::iterator I = Values.begin(),
1263                                 E = Values.end(); I != E; ++I) {
1264     SVal V = *I;
1265     if (Optional<nonloc::LazyCompoundVal> LCS =
1266         V.getAs<nonloc::LazyCompoundVal>()) {
1267 
1268       const SValListTy &Vals = getInterestingValues(*LCS);
1269 
1270       for (SValListTy::const_iterator I = Vals.begin(),
1271                                       E = Vals.end(); I != E; ++I) {
1272         // Note: the last argument is false here because these are
1273         // non-top-level regions.
1274         if (const MemRegion *R = (*I).getAsRegion())
1275           W.AddToWorkList(R);
1276       }
1277       continue;
1278     }
1279 
1280     if (const MemRegion *R = V.getAsRegion()) {
1281       if (TopLevelRegions)
1282         TopLevelRegions->push_back(R);
1283       W.AddToWorkList(R);
1284       continue;
1285     }
1286   }
1287 }
1288 
1289 StoreRef
1290 RegionStoreManager::invalidateRegions(Store store,
1291                                      ArrayRef<SVal> Values,
1292                                      const Expr *Ex, unsigned Count,
1293                                      const LocationContext *LCtx,
1294                                      const CallEvent *Call,
1295                                      InvalidatedSymbols &IS,
1296                                      RegionAndSymbolInvalidationTraits &ITraits,
1297                                      InvalidatedRegions *TopLevelRegions,
1298                                      InvalidatedRegions *Invalidated) {
1299   GlobalsFilterKind GlobalsFilter;
1300   if (Call) {
1301     if (Call->isInSystemHeader())
1302       GlobalsFilter = GFK_SystemOnly;
1303     else
1304       GlobalsFilter = GFK_All;
1305   } else {
1306     GlobalsFilter = GFK_None;
1307   }
1308 
1309   RegionBindingsRef B = getRegionBindings(store);
1310   invalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits,
1311                             Invalidated, GlobalsFilter);
1312 
1313   // Scan the bindings and generate the clusters.
1314   W.GenerateClusters();
1315 
1316   // Add the regions to the worklist.
1317   populateWorkList(W, Values, TopLevelRegions);
1318 
1319   W.RunWorkList();
1320 
1321   // Return the new bindings.
1322   B = W.getRegionBindings();
1323 
1324   // For calls, determine which global regions should be invalidated and
1325   // invalidate them. (Note that function-static and immutable globals are never
1326   // invalidated by this.)
1327   // TODO: This could possibly be more precise with modules.
1328   switch (GlobalsFilter) {
1329   case GFK_All:
1330     B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind,
1331                                Ex, Count, LCtx, B, Invalidated);
1332     // FALLTHROUGH
1333   case GFK_SystemOnly:
1334     B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind,
1335                                Ex, Count, LCtx, B, Invalidated);
1336     // FALLTHROUGH
1337   case GFK_None:
1338     break;
1339   }
1340 
1341   return StoreRef(B.asStore(), *this);
1342 }
1343 
1344 //===----------------------------------------------------------------------===//
1345 // Extents for regions.
1346 //===----------------------------------------------------------------------===//
1347 
1348 DefinedOrUnknownSVal
1349 RegionStoreManager::getSizeInElements(ProgramStateRef state,
1350                                       const MemRegion *R,
1351                                       QualType EleTy) {
1352   SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder);
1353   const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size);
1354   if (!SizeInt)
1355     return UnknownVal();
1356 
1357   CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue());
1358 
1359   if (Ctx.getAsVariableArrayType(EleTy)) {
1360     // FIXME: We need to track extra state to properly record the size
1361     // of VLAs.  Returning UnknownVal here, however, is a stop-gap so that
1362     // we don't have a divide-by-zero below.
1363     return UnknownVal();
1364   }
1365 
1366   CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy);
1367 
1368   // If a variable is reinterpreted as a type that doesn't fit into a larger
1369   // type evenly, round it down.
1370   // This is a signed value, since it's used in arithmetic with signed indices.
1371   return svalBuilder.makeIntVal(RegionSize / EleSize,
1372                                 svalBuilder.getArrayIndexType());
1373 }
1374 
1375 //===----------------------------------------------------------------------===//
1376 // Location and region casting.
1377 //===----------------------------------------------------------------------===//
1378 
1379 /// ArrayToPointer - Emulates the "decay" of an array to a pointer
1380 ///  type.  'Array' represents the lvalue of the array being decayed
1381 ///  to a pointer, and the returned SVal represents the decayed
1382 ///  version of that lvalue (i.e., a pointer to the first element of
1383 ///  the array).  This is called by ExprEngine when evaluating casts
1384 ///  from arrays to pointers.
1385 SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
1386   if (Array.getAs<loc::ConcreteInt>())
1387     return Array;
1388 
1389   if (!Array.getAs<loc::MemRegionVal>())
1390     return UnknownVal();
1391 
1392   const SubRegion *R =
1393       cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion());
1394   NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
1395   return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx));
1396 }
1397 
1398 //===----------------------------------------------------------------------===//
1399 // Loading values from regions.
1400 //===----------------------------------------------------------------------===//
1401 
// Load the value bound at location L from the bindings B, dispatching on the
// kind of region and its value type. T, if non-null, is the expected type of
// the load; otherwise the type is derived from the region itself.
SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) {
  assert(!L.getAs<UnknownVal>() && "location unknown");
  assert(!L.getAs<UndefinedVal>() && "location undefined");

  // For access to concrete addresses, return UnknownVal.  Checks
  // for null dereferences (and similar errors) are done by checkers, not
  // the Store.
  // FIXME: We can consider lazily symbolicating such memory, but we really
  // should defer this when we can reason easily about symbolicating arrays
  // of bytes.
  if (L.getAs<loc::ConcreteInt>()) {
    return UnknownVal();
  }
  if (!L.getAs<loc::MemRegionVal>()) {
    return UnknownVal();
  }

  const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();

  // Block data has no modeled value.
  if (isa<BlockDataRegion>(MR)) {
    return UnknownVal();
  }

  // For untyped regions, auto-detect the load type from the region or symbol
  // and reinterpret the region as an array of that type (element zero).
  if (!isa<TypedValueRegion>(MR)) {
    if (T.isNull()) {
      if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR))
        T = TR->getLocationType()->getPointeeType();
      else if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR))
        T = SR->getSymbol()->getType()->getPointeeType();
    }
    assert(!T.isNull() && "Unable to auto-detect binding type!");
    assert(!T->isVoidType() && "Attempting to dereference a void pointer!");
    MR = GetElementZeroRegion(cast<SubRegion>(MR), T);
  } else {
    T = cast<TypedValueRegion>(MR)->getValueType();
  }

  // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
  //  instead of 'Loc', and have the other Loc cases handled at a higher level.
  const TypedValueRegion *R = cast<TypedValueRegion>(MR);
  QualType RTy = R->getValueType();

  // FIXME: we do not yet model the parts of a complex type, so treat the
  // whole thing as "unknown".
  if (RTy->isAnyComplexType())
    return UnknownVal();

  // FIXME: We should eventually handle funny addressing.  e.g.:
  //
  //   int x = ...;
  //   int *p = &x;
  //   char *q = (char*) p;
  //   char c = *q;  // returns the first byte of 'x'.
  //
  // Such funny addressing will occur due to layering of regions.
  if (RTy->isStructureOrClassType())
    return getBindingForStruct(B, R);

  // FIXME: Handle unions.
  if (RTy->isUnionType())
    return createLazyBinding(B, R);

  if (RTy->isArrayType()) {
    if (RTy->isConstantArrayType())
      return getBindingForArray(B, R);
    else
      return UnknownVal();
  }

  // FIXME: handle Vector types.
  if (RTy->isVectorType())
    return UnknownVal();

  if (const FieldRegion* FR = dyn_cast<FieldRegion>(R))
    return CastRetrievedVal(getBindingForField(B, FR), FR, T);

  if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the element type.  Eventually we want to compose these values
    // more intelligently.  For example, an 'element' can encompass multiple
    // bound regions (e.g., several bound bytes), or could be a subset of
    // a larger value.
    return CastRetrievedVal(getBindingForElement(B, ER), ER, T);
  }

  if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the ivar type.  What we should model is stores to ivars
    // that blow past the extent of the ivar.  If the address of the ivar is
    // reinterpretted, it is possible we stored a different value that could
    // fit within the ivar.  Either we need to cast these when storing them
    // or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T);
  }

  if (const VarRegion *VR = dyn_cast<VarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the variable type.  What we should model is stores to variables
    // that blow past the extent of the variable.  If the address of the
    // variable is reinterpretted, it is possible we stored a different value
    // that could fit within the variable.  Either we need to cast these when
    // storing them or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForVar(B, VR), VR, T);
  }

  // Remaining region kinds: look for an explicit direct binding.
  const SVal *V = B.lookup(R, BindingKey::Direct);

  // Check if the region has a binding.
  if (V)
    return *V;

  // The location does not have a bound value.  This means that it has
  // the value it had upon its creation and/or entry to the analyzed
  // function/method.  These are either symbolic values or 'undefined'.
  if (R->hasStackNonParametersStorage()) {
    // All stack variables are considered to have undefined values
    // upon creation.  All heap allocated blocks are considered to
    // have undefined values as well unless they are explicitly bound
    // to specific values.
    return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}
1527 
1528 static QualType getUnderlyingType(const SubRegion *R) {
1529   QualType RegionTy;
1530   if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R))
1531     RegionTy = TVR->getValueType();
1532 
1533   if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R))
1534     RegionTy = SR->getSymbol()->getType();
1535 
1536   return RegionTy;
1537 }
1538 
/// Checks to see if store \p B has a lazy binding for region \p R.
///
/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
/// if there are additional bindings within \p R.
///
/// Note that unlike RegionStoreManager::findLazyBinding, this will not search
/// for lazy bindings for super-regions of \p R.
static Optional<nonloc::LazyCompoundVal>
getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
                       const SubRegion *R, bool AllowSubregionBindings) {
  // Lazy bindings are stored as default bindings; a direct binding can never
  // be a lazy compound value here.
  Optional<SVal> V = B.getDefaultBinding(R);
  if (!V)
    return None;

  Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>();
  if (!LCV)
    return None;

  // If the LCV is for a subregion, the types might not match, and we shouldn't
  // reuse the binding. (A void-pointer-typed region is exempt from the type
  // check, as is a region with no discernible type.)
  QualType RegionTy = getUnderlyingType(R);
  if (!RegionTy.isNull() &&
      !RegionTy->isVoidPointerType()) {
    QualType SourceRegionTy = LCV->getRegion()->getValueType();
    if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy))
      return None;
  }

  if (!AllowSubregionBindings) {
    // If there are any other bindings within this region, we shouldn't reuse
    // the top-level binding.
    SmallVector<BindingPair, 16> Bindings;
    collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R,
                             /*IncludeAllDefaultBindings=*/true);
    // The lazy binding we found above is itself collected, so more than one
    // entry means some additional binding overlaps R.
    if (Bindings.size() > 1)
      return None;
  }

  return *LCV;
}
1579 
1580 
/// Searches for a lazy binding covering \p R by walking up through element,
/// field, and C++ base-object super-regions.
///
/// Returns the store the lazy binding was created from, paired with the
/// queried region re-rooted on top of the lazily-copied source region; both
/// members are null if no lazy binding is found on the super-region chain.
std::pair<Store, const SubRegion *>
RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
                                   const SubRegion *R,
                                   const SubRegion *originalRegion) {
  // On the first call R == originalRegion; a lazy binding directly on the
  // originally queried region is deliberately not considered here.
  if (originalRegion != R) {
    if (Optional<nonloc::LazyCompoundVal> V =
          getExistingLazyBinding(svalBuilder, B, R, true))
      return std::make_pair(V->getStore(), V->getRegion());
  }

  typedef std::pair<Store, const SubRegion *> StoreRegionPair;
  StoreRegionPair Result = StoreRegionPair();

  if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
    // Recurse through the super-region; if a lazy binding is found, rebuild
    // this element region on top of the lazily-copied region.
    Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second);

  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
    // Same re-rooting strategy for field regions.
    Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()),
                                       originalRegion);

    if (Result.second)
      Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second);

  } else if (const CXXBaseObjectRegion *BaseReg =
               dyn_cast<CXXBaseObjectRegion>(R)) {
    // C++ base object region is another kind of region that we should blast
    // through to look for lazy compound value. It is like a field region.
    Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg,
                                                            Result.second);
  }

  return Result;
}
1622 
/// Retrieves the value bound to an element region, trying in order: a direct
/// binding, string-literal contents, constant array initializers, and
/// derived/symbolic values for indexed scalar objects; falls back to
/// getBindingForFieldOrElementCommon.
SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
                                              const ElementRegion* R) {
  // We do not currently model bindings of the CompoundLiteralRegion.
  if (isa<CompoundLiteralRegion>(R->getBaseRegion()))
    return UnknownVal();

  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  const MemRegion* superR = R->getSuperRegion();

  // Check if the region is an element region of a string literal.
  if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) {
    // FIXME: Handle loads from strings where the literal is treated as
    // an integer, e.g., *((unsigned int*)"hello")
    QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType();
    if (!Ctx.hasSameUnqualifiedType(T, R->getElementType()))
      return UnknownVal();

    const StringLiteral *Str = StrR->getStringLiteral();
    SVal Idx = R->getIndex();
    if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) {
      int64_t i = CI->getValue().getSExtValue();
      // Abort on string underrun.  This can be possible by arbitrary
      // clients of getBindingForElement().
      if (i < 0)
        return UndefinedVal();
      int64_t length = Str->getLength();
      // Technically, only i == length is guaranteed to be null.
      // However, such overflows should be caught before reaching this point;
      // the only time such an access would be made is if a string literal was
      // used to initialize a larger array.
      char c = (i >= length) ? '\0' : Str->getCodeUnit(i);
      return svalBuilder.makeIntVal(c, T);
    }
  } else if (const VarRegion *VR = dyn_cast<VarRegion>(superR)) {
    // Check if the containing array is const and has an initialized value.
    const VarDecl *VD = VR->getDecl();
    // Either the array or the array element has to be const.
    if (VD->getType().isConstQualified() || R->getElementType().isConstQualified()) {
      if (const Expr *Init = VD->getInit()) {
        if (const auto *InitList = dyn_cast<InitListExpr>(Init)) {
          // The array index has to be known.
          if (auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) {
            int64_t i = CI->getValue().getSExtValue();
            // If it is known that the index is out of bounds, we can return
            // an undefined value.
            if (i < 0)
              return UndefinedVal();

            if (auto CAT = Ctx.getAsConstantArrayType(VD->getType()))
              if (CAT->getSize().sle(i))
                return UndefinedVal();

            // If there is a list, but no init, it must be zero.
            if (i >= InitList->getNumInits())
              return svalBuilder.makeZeroVal(R->getElementType());

            if (const Expr *ElemInit = InitList->getInit(i))
              if (Optional<SVal> V = svalBuilder.getConstantVal(ElemInit))
                return *V;
          }
        }
      }
    }
  }

  // Check for loads from a code text region.  For such loads, just give up.
  if (isa<CodeTextRegion>(superR))
    return UnknownVal();

  // Handle the case where we are indexing into a larger scalar object.
  // For example, this handles:
  //   int x = ...
  //   char *y = &x;
  //   return *y;
  // FIXME: This is a hack, and doesn't do anything really intelligent yet.
  const RegionRawOffset &O = R->getAsArrayOffset();

  // If we cannot reason about the offset, return an unknown value.
  if (!O.getRegion())
    return UnknownVal();

  if (const TypedValueRegion *baseR =
        dyn_cast_or_null<TypedValueRegion>(O.getRegion())) {
    QualType baseT = baseR->getValueType();
    if (baseT->isScalarType()) {
      QualType elemT = R->getElementType();
      if (elemT->isScalarType()) {
        // Only derive a value when the element fits inside the base object.
        if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) {
          if (const Optional<SVal> &V = B.getDirectBinding(superR)) {
            if (SymbolRef parentSym = V->getAsSymbol())
              return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);

            if (V->isUnknownOrUndef())
              return *V;
            // Other cases: give up.  We are indexing into a larger object
            // that has some value, but we don't know how to handle that yet.
            return UnknownVal();
          }
        }
      }
    }
  }
  // No special case applied: use the common field/element lookup path.
  return getBindingForFieldOrElementCommon(B, R, R->getElementType());
}
1730 
/// Retrieves the value bound to a field region, trying in order: a direct
/// binding, a constant in-class initializer, a constant initializer of the
/// containing record variable, then the common field/element lookup path.
SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
                                            const FieldRegion* R) {

  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  // Is the field declared constant and has an in-class initializer?
  const FieldDecl *FD = R->getDecl();
  QualType Ty = FD->getType();
  if (Ty.isConstQualified())
    if (const Expr *Init = FD->getInClassInitializer())
      if (Optional<SVal> V = svalBuilder.getConstantVal(Init))
        return *V;

  // If the containing record was initialized, try to get its constant value.
  const MemRegion* superR = R->getSuperRegion();
  if (const auto *VR = dyn_cast<VarRegion>(superR)) {
    const VarDecl *VD = VR->getDecl();
    QualType RecordVarTy = VD->getType();
    unsigned Index = FD->getFieldIndex();
    // Either the record variable or the field has to be const qualified.
    if (RecordVarTy.isConstQualified() || Ty.isConstQualified())
      if (const Expr *Init = VD->getInit())
        if (const auto *InitList = dyn_cast<InitListExpr>(Init)) {
          if (Index < InitList->getNumInits()) {
            if (const Expr *FieldInit = InitList->getInit(Index))
              if (Optional<SVal> V = svalBuilder.getConstantVal(FieldInit))
                return *V;
          } else {
            // Fields past the end of the initializer list are
            // value-initialized, i.e. zero.
            return svalBuilder.makeZeroVal(Ty);
          }
        }
  }

  return getBindingForFieldOrElementCommon(B, R, Ty);
}
1768 
/// If \p superR carries a default binding, derives the value that binding
/// implies for the subregion \p R of type \p Ty.
///
/// Returns None when the super-region has no default binding at all.
Optional<SVal>
RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                     const MemRegion *superR,
                                                     const TypedValueRegion *R,
                                                     QualType Ty) {

  if (const Optional<SVal> &D = B.getDefaultBinding(superR)) {
    const SVal &val = D.getValue();
    // A symbolic default (e.g. an invalidation symbol) yields a derived
    // symbol specific to this subregion.
    if (SymbolRef parentSym = val.getAsSymbol())
      return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);

    // A zero default (memset-style initialization) zero-fills every
    // subregion, re-typed to Ty.
    if (val.isZeroConstant())
      return svalBuilder.makeZeroVal(Ty);

    if (val.isUnknownOrUndef())
      return val;

    // Lazy bindings are usually handled through getExistingLazyBinding().
    // We should unify these two code paths at some point.
    if (val.getAs<nonloc::LazyCompoundVal>() ||
        val.getAs<nonloc::CompoundVal>())
      return val;

    llvm_unreachable("Unknown default value");
  }

  return None;
}
1797 
/// Reads the value of \p LazyBindingRegion out of the snapshot bindings
/// \p LazyBinding that a lazy compound value was created from.
SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion,
                                        RegionBindingsRef LazyBinding) {
  SVal Result;
  // findLazyBinding only produces element, field, or base-object regions;
  // base-object regions are handled by the FieldRegion cast path's caller.
  if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion))
    Result = getBindingForElement(LazyBinding, ER);
  else
    Result = getBindingForField(LazyBinding,
                                cast<FieldRegion>(LazyBindingRegion));

  // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
  // default value for /part/ of an aggregate from a default value for the
  // /entire/ aggregate. The most common case of this is when struct Outer
  // has as its first member a struct Inner, which is copied in from a stack
  // variable. In this case, even if the Outer's default value is symbolic, 0,
  // or unknown, it gets overridden by the Inner's default value of undefined.
  //
  // This is a general problem -- if the Inner is zero-initialized, the Outer
  // will now look zero-initialized. The proper way to solve this is with a
  // new version of RegionStore that tracks the extent of a binding as well
  // as the offset.
  //
  // This hack only takes care of the undefined case because that can very
  // quickly result in a warning.
  if (Result.isUndef())
    Result = UnknownVal();

  return Result;
}
1826 
/// Common fallback for field and element lookups once direct bindings and
/// constant initializers have been ruled out: consults lazy bindings, then
/// default bindings of ancestor regions, and finally conjures undefined
/// (stack) or symbolic (everything else) values.
SVal
RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                                      const TypedValueRegion *R,
                                                      QualType Ty) {

  // At this point we have already checked in either getBindingForElement or
  // getBindingForField if 'R' has a direct binding.

  // Lazy binding?
  Store lazyBindingStore = nullptr;
  const SubRegion *lazyBindingRegion = nullptr;
  std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R);
  if (lazyBindingRegion)
    return getLazyBinding(lazyBindingRegion,
                          getRegionBindings(lazyBindingStore));

  // Record whether or not we see a symbolic index.  That can completely
  // be out of scope of our lookup.
  bool hasSymbolicIndex = false;

  // FIXME: This is a hack to deal with RegionStore's inability to distinguish a
  // default value for /part/ of an aggregate from a default value for the
  // /entire/ aggregate. The most common case of this is when struct Outer
  // has as its first member a struct Inner, which is copied in from a stack
  // variable. In this case, even if the Outer's default value is symbolic, 0,
  // or unknown, it gets overridden by the Inner's default value of undefined.
  //
  // This is a general problem -- if the Inner is zero-initialized, the Outer
  // will now look zero-initialized. The proper way to solve this is with a
  // new version of RegionStore that tracks the extent of a binding as well
  // as the offset.
  //
  // This hack only takes care of the undefined case because that can very
  // quickly result in a warning.
  bool hasPartialLazyBinding = false;

  // Walk up the super-region chain looking for a default binding that can
  // provide a derived value for R.
  const SubRegion *SR = R;
  while (SR) {
    const MemRegion *Base = SR->getSuperRegion();
    if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) {
      if (D->getAs<nonloc::LazyCompoundVal>()) {
        hasPartialLazyBinding = true;
        break;
      }

      return *D;
    }

    if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) {
      NonLoc index = ER->getIndex();
      if (!index.isConstant())
        hasSymbolicIndex = true;
    }

    // If our super region is a field or element itself, walk up the region
    // hierarchy to see if there is a default value installed in an ancestor.
    SR = dyn_cast<SubRegion>(Base);
  }

  if (R->hasStackNonParametersStorage()) {
    if (isa<ElementRegion>(R)) {
      // Currently we don't reason specially about Clang-style vectors.  Check
      // if superR is a vector and if so return Unknown.
      if (const TypedValueRegion *typedSuperR =
            dyn_cast<TypedValueRegion>(R->getSuperRegion())) {
        if (typedSuperR->getValueType()->isVectorType())
          return UnknownVal();
      }
    }

    // FIXME: We also need to take ElementRegions with symbolic indexes into
    // account.  This case handles both directly accessing an ElementRegion
    // with a symbolic offset, but also fields within an element with
    // a symbolic offset.
    if (hasSymbolicIndex)
      return UnknownVal();

    // Uninitialized local storage reads as undefined, unless part of it may
    // have been covered by a lazy copy.
    if (!hasPartialLazyBinding)
      return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}
1911 
/// Retrieves the value bound to an Objective-C ivar region: a direct binding,
/// a value derived from the containing object's default binding, or a fresh
/// symbol for the ivar's initial value.
SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B,
                                               const ObjCIvarRegion* R) {
  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  const MemRegion *superR = R->getSuperRegion();

  // Check if the super region has a default binding.
  if (const Optional<SVal> &V = B.getDefaultBinding(superR)) {
    // A symbolic default yields a derived symbol for this ivar.
    if (SymbolRef parentSym = V->getAsSymbol())
      return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);

    // Other cases: give up.
    return UnknownVal();
  }

  return getBindingForLazySymbol(R);
}
1931 
/// Retrieves the value bound to a variable region, deriving one from the
/// variable's memory space and initializer when no explicit binding exists.
/// The order of the checks below is significant.
SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B,
                                          const VarRegion *R) {

  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  // Lazily derive a value for the VarRegion.
  const VarDecl *VD = R->getDecl();
  const MemSpaceRegion *MS = R->getMemorySpace();

  // Arguments are always symbolic.
  if (isa<StackArgumentsSpaceRegion>(MS))
    return svalBuilder.getRegionValueSymbolVal(R);

  // Is 'VD' declared constant?  If so, retrieve the constant value.
  if (VD->getType().isConstQualified()) {
    if (const Expr *Init = VD->getInit()) {
      if (Optional<SVal> V = svalBuilder.getConstantVal(Init))
        return *V;

      // If the variable is const qualified and has an initializer but
      // we couldn't evaluate initializer to a value, treat the value as
      // unknown.
      return UnknownVal();
    }
  }

  // This must come after the check for constants because closure-captured
  // constant variables may appear in UnknownSpaceRegion.
  if (isa<UnknownSpaceRegion>(MS))
    return svalBuilder.getRegionValueSymbolVal(R);

  if (isa<GlobalsSpaceRegion>(MS)) {
    QualType T = VD->getType();

    // Function-scoped static variables are default-initialized to 0; if they
    // have an initializer, it would have been processed by now.
    // FIXME: This is only true when we're starting analysis from main().
    // We're losing a lot of coverage here.
    if (isa<StaticGlobalSpaceRegion>(MS))
      return svalBuilder.makeZeroVal(T);

    // A default binding on the globals space (e.g. from invalidation)
    // provides a derived value; it can never be a lazy compound value.
    if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) {
      assert(!V->getAs<nonloc::LazyCompoundVal>());
      return V.getValue();
    }

    return svalBuilder.getRegionValueSymbolVal(R);
  }

  // Remaining case: an uninitialized, non-argument local variable.
  return UndefinedVal();
}
1985 
/// Conjures a fresh symbol standing for the initial (pre-analysis) value of
/// region \p R.
SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) {
  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}
1990 
1991 const RegionStoreManager::SValListTy &
1992 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) {
1993   // First, check the cache.
1994   LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData());
1995   if (I != LazyBindingsMap.end())
1996     return I->second;
1997 
1998   // If we don't have a list of values cached, start constructing it.
1999   SValListTy List;
2000 
2001   const SubRegion *LazyR = LCV.getRegion();
2002   RegionBindingsRef B = getRegionBindings(LCV.getStore());
2003 
2004   // If this region had /no/ bindings at the time, there are no interesting
2005   // values to return.
2006   const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion());
2007   if (!Cluster)
2008     return (LazyBindingsMap[LCV.getCVData()] = std::move(List));
2009 
2010   SmallVector<BindingPair, 32> Bindings;
2011   collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR,
2012                            /*IncludeAllDefaultBindings=*/true);
2013   for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(),
2014                                                     E = Bindings.end();
2015        I != E; ++I) {
2016     SVal V = I->second;
2017     if (V.isUnknownOrUndef() || V.isConstant())
2018       continue;
2019 
2020     if (Optional<nonloc::LazyCompoundVal> InnerLCV =
2021             V.getAs<nonloc::LazyCompoundVal>()) {
2022       const SValListTy &InnerList = getInterestingValues(*InnerLCV);
2023       List.insert(List.end(), InnerList.begin(), InnerList.end());
2024       continue;
2025     }
2026 
2027     List.push_back(V);
2028   }
2029 
2030   return (LazyBindingsMap[LCV.getCVData()] = std::move(List));
2031 }
2032 
/// Returns a lazy compound value snapshotting region \p R in bindings \p B,
/// reusing an existing lazy binding on R when one exists with no other
/// bindings inside R (so the snapshot stays precise).
NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B,
                                             const TypedValueRegion *R) {
  if (Optional<nonloc::LazyCompoundVal> V =
        getExistingLazyBinding(svalBuilder, B, R, false))
    return *V;

  return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R);
}
2041 
2042 static bool isRecordEmpty(const RecordDecl *RD) {
2043   if (!RD->field_empty())
2044     return false;
2045   if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD))
2046     return CRD->getNumBases() == 0;
2047   return true;
2048 }
2049 
2050 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B,
2051                                              const TypedValueRegion *R) {
2052   const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl();
2053   if (!RD->getDefinition() || isRecordEmpty(RD))
2054     return UnknownVal();
2055 
2056   return createLazyBinding(B, R);
2057 }
2058 
/// Reads a whole constant-sized array region as a (lazy) compound value.
SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B,
                                            const TypedValueRegion *R) {
  assert(Ctx.getAsConstantArrayType(R->getValueType()) &&
         "Only constant array types can have compound bindings.");

  return createLazyBinding(B, R);
}
2066 
/// Returns true if \p region (normalized to its base region) is referenced by
/// \p store, either as the head of a binding cluster or as a region appearing
/// in some bound value.
bool RegionStoreManager::includedInBindings(Store store,
                                            const MemRegion *region) const {
  RegionBindingsRef B = getRegionBindings(store);
  region = region->getBaseRegion();

  // Quick path: if the base is the head of a cluster, the region is live.
  if (B.lookup(region))
    return true;

  // Slow path: if the region is the VALUE of any binding, it is live.
  for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) {
    const ClusterBindings &Cluster = RI.getData();
    for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
         CI != CE; ++CI) {
      const SVal &D = CI.getData();
      if (const MemRegion *R = D.getAsRegion())
        if (R->getBaseRegion() == region)
          return true;
    }
  }

  return false;
}
2090 
2091 //===----------------------------------------------------------------------===//
2092 // Binding values to regions.
2093 //===----------------------------------------------------------------------===//
2094 
/// Removes the binding (if any) for the region referenced by location \p L.
/// Locations that do not reference a region leave the store unchanged.
StoreRef RegionStoreManager::killBinding(Store ST, Loc L) {
  if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>())
    if (const MemRegion* R = LV->getRegion())
      return StoreRef(getRegionBindings(ST).removeBinding(R)
                                           .asImmutableMap()
                                           .getRootWithoutRetain(),
                      *this);

  return StoreRef(ST, *this);
}
2105 
/// Binds value \p V to location \p L, dispatching aggregate-typed regions to
/// the appropriate specialized bind method and installing a direct binding
/// for everything else.
RegionBindingsRef
RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) {
  // We cannot bind to a concrete address; leave the store unchanged.
  if (L.getAs<loc::ConcreteInt>())
    return B;

  // If we get here, the location should be a region.
  const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion();

  // Check if the region is a struct region.
  if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) {
    QualType Ty = TR->getValueType();
    if (Ty->isArrayType())
      return bindArray(B, TR, V);
    if (Ty->isStructureOrClassType())
      return bindStruct(B, TR, V);
    if (Ty->isVectorType())
      return bindVector(B, TR, V);
    if (Ty->isUnionType())
      return bindAggregate(B, TR, V);
  }

  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) {
    // Binding directly to a symbolic region should be treated as binding
    // to element 0.
    QualType T = SR->getSymbol()->getType();
    if (T->isAnyPointerType() || T->isReferenceType())
      T = T->getPointeeType();

    R = GetElementZeroRegion(SR, T);
  }

  assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) &&
         "'this' pointer is not an l-value and is not assignable");

  // Clear out bindings that may overlap with this binding.
  RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R));
  return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V);
}
2144 
/// Installs a default binding on \p R representing implicit (zero/null)
/// initialization of a value of type \p T, e.g. for the uninitialized tail
/// of an array initializer list.
RegionBindingsRef
RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B,
                                            const MemRegion *R,
                                            QualType T) {
  SVal V;

  if (Loc::isLocType(T))
    V = svalBuilder.makeNull();
  else if (T->isIntegralOrEnumerationType())
    V = svalBuilder.makeZeroVal(T);
  else if (T->isStructureOrClassType() || T->isArrayType()) {
    // Set the default value to a zero constant when it is a structure
    // or array.  The type doesn't really matter.
    V = svalBuilder.makeZeroVal(Ctx.IntTy);
  }
  else {
    // We can't represent values of this type, but we still need to set a value
    // to record that the region has been initialized.
    // If this assertion ever fires, a new case should be added above -- we
    // should know how to default-initialize any value we can symbolicate.
    assert(!SymbolManager::canSymbolicate(T) && "This type is representable");
    V = UnknownVal();
  }

  return B.addBinding(R, BindingKey::Default, V);
}
2171 
/// Binds \p Init to array region \p R. Compound values are bound
/// element-by-element (recursing for nested arrays/records); lazy compound
/// values, unknowns, and lvalue initializers become aggregate bindings.
RegionBindingsRef
RegionStoreManager::bindArray(RegionBindingsConstRef B,
                              const TypedValueRegion* R,
                              SVal Init) {

  const ArrayType *AT =cast<ArrayType>(Ctx.getCanonicalType(R->getValueType()));
  QualType ElementTy = AT->getElementType();
  // Size is unset for variable-length / incomplete arrays.
  Optional<uint64_t> Size;

  if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT))
    Size = CAT->getSize().getZExtValue();

  // Check if the init expr is a literal. If so, bind the rvalue instead.
  // FIXME: It's not responsibility of the Store to transform this lvalue
  // to rvalue. ExprEngine or maybe even CFG should do this before binding.
  if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
    SVal V = getBinding(B.asStore(), *MRV, R->getValueType());
    return bindAggregate(B, R, V);
  }

  // Handle lazy compound values.
  if (Init.getAs<nonloc::LazyCompoundVal>())
    return bindAggregate(B, R, Init);

  if (Init.isUnknown())
    return bindAggregate(B, R, UnknownVal());

  // Remaining case: explicit compound values.
  const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
  nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
  uint64_t i = 0;

  RegionBindingsRef NewB(B);

  // Bind each initializer to its element region; stop at the array length
  // when it is known, or at the end of the initializer list otherwise.
  for (; Size.hasValue() ? i < Size.getValue() : true ; ++i, ++VI) {
    // The init list might be shorter than the array length.
    if (VI == VE)
      break;

    const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
    const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);

    if (ElementTy->isStructureOrClassType())
      NewB = bindStruct(NewB, ER, *VI);
    else if (ElementTy->isArrayType())
      NewB = bindArray(NewB, ER, *VI);
    else
      NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
  }

  // If the init list is shorter than the array length (or the array has
  // variable length), set the array default value. Values that are already set
  // are not overwritten.
  if (!Size.hasValue() || i < Size.getValue())
    NewB = setImplicitDefaultValue(NewB, R, ElementTy);

  return NewB;
}
2230 
/// Binds \p V to vector region \p R, element-by-element for explicit
/// compound values and as an aggregate binding otherwise.
RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
                                                 const TypedValueRegion* R,
                                                 SVal V) {
  QualType T = R->getValueType();
  assert(T->isVectorType());
  const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.

  // Handle lazy compound values and symbolic values.
  if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
    return bindAggregate(B, R, V);

  // We may get non-CompoundVal accidentally due to imprecise cast logic or
  // that we are binding symbolic struct value. Kill the field values, and if
  // the value is symbolic go and bind it as a "default" binding.
  if (!V.getAs<nonloc::CompoundVal>()) {
    return bindAggregate(B, R, UnknownVal());
  }

  QualType ElemType = VT->getElementType();
  nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>();
  nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
  unsigned index = 0, numElements = VT->getNumElements();
  RegionBindingsRef NewB(B);

  // Bind each provided value to the corresponding element; a short compound
  // value simply leaves the trailing elements unbound.
  for ( ; index != numElements ; ++index) {
    if (VI == VE)
      break;

    NonLoc Idx = svalBuilder.makeArrayIndex(index);
    const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx);

    if (ElemType->isArrayType())
      NewB = bindArray(NewB, ER, *VI);
    else if (ElemType->isStructureOrClassType())
      NewB = bindStruct(NewB, ER, *VI);
    else
      NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
  }
  return NewB;
}
2271 
/// Attempts to copy a small, flat record field-by-field instead of binding
/// the lazy compound value \p LCV as a whole.
///
/// Returns None when the record has bases, more than SmallStructLimit named
/// fields, or any non-scalar/non-reference field, in which case the caller
/// falls back to an aggregate binding.
Optional<RegionBindingsRef>
RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B,
                                       const TypedValueRegion *R,
                                       const RecordDecl *RD,
                                       nonloc::LazyCompoundVal LCV) {
  FieldVector Fields;

  if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD))
    if (Class->getNumBases() != 0 || Class->getNumVBases() != 0)
      return None;

  for (const auto *FD : RD->fields()) {
    // Unnamed bitfields are padding; they carry no bindable value.
    if (FD->isUnnamedBitfield())
      continue;

    // If there are too many fields, or if any of the fields are aggregates,
    // just use the LCV as a default binding.
    if (Fields.size() == SmallStructLimit)
      return None;

    QualType Ty = FD->getType();
    if (!(Ty->isScalarType() || Ty->isReferenceType()))
      return None;

    Fields.push_back(FD);
  }

  RegionBindingsRef NewB = B;

  // Read each field out of the LCV's snapshot store and bind it into the
  // destination record.
  for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){
    const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion());
    SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR);

    const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R);
    NewB = bind(NewB, loc::MemRegionVal(DestFR), V);
  }

  return NewB;
}
2311 
// Bind a value to a structure/class-typed region. Compound values are bound
// field-by-field; lazy compound values may be copied eagerly for small
// structs (see tryBindSmallStruct) or bound as a single default binding;
// anything else conservatively clobbers the region's bindings.
RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B,
                                                 const TypedValueRegion* R,
                                                 SVal V) {
  // If this store model does not track fields, leave the bindings untouched.
  if (!Features.supportsFields())
    return B;

  QualType T = R->getValueType();
  assert(T->isStructureOrClassType());

  const RecordType* RT = T->getAs<RecordType>();
  const RecordDecl *RD = RT->getDecl();

  // Without a complete definition we cannot enumerate fields; do nothing.
  if (!RD->isCompleteDefinition())
    return B;

  // Handle lazy compound values and symbolic values.
  if (Optional<nonloc::LazyCompoundVal> LCV =
        V.getAs<nonloc::LazyCompoundVal>()) {
    // Prefer an eager field-by-field copy for small structs; otherwise fall
    // back to a default binding over the whole aggregate.
    if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV))
      return *NewB;
    return bindAggregate(B, R, V);
  }
  if (V.getAs<nonloc::SymbolVal>())
    return bindAggregate(B, R, V);

  // We may get non-CompoundVal accidentally due to imprecise cast logic or
  // that we are binding symbolic struct value. Kill the field values, and if
  // the value is symbolic go and bind it as a "default" binding.
  if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
    return bindAggregate(B, R, UnknownVal());

  const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
  nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();

  // Walk fields and initializer values in lockstep; VI advances only for
  // fields that actually consume an initializer.
  RecordDecl::field_iterator FI, FE;
  RegionBindingsRef NewB(B);

  for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {

    if (VI == VE)
      break;

    // Skip any unnamed bitfields to stay in sync with the initializers.
    if (FI->isUnnamedBitfield())
      continue;

    QualType FTy = FI->getType();
    const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);

    if (FTy->isArrayType())
      NewB = bindArray(NewB, FR, *VI);
    else if (FTy->isStructureOrClassType())
      NewB = bindStruct(NewB, FR, *VI);
    else
      NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
    ++VI;
  }

  // There may be fewer values in the initialize list than the fields of struct.
  // The remaining fields are zero-initialized, expressed as a default binding
  // of 0 over the whole region.
  if (FI != FE) {
    NewB = NewB.addBinding(R, BindingKey::Default,
                           svalBuilder.makeIntVal(0, false));
  }

  return NewB;
}
2378 
2379 RegionBindingsRef
2380 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2381                                   const TypedRegion *R,
2382                                   SVal Val) {
2383   // Remove the old bindings, using 'R' as the root of all regions
2384   // we will invalidate. Then add the new binding.
2385   return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2386 }
2387 
2388 //===----------------------------------------------------------------------===//
2389 // State pruning.
2390 //===----------------------------------------------------------------------===//
2391 
namespace {
// Worklist-based reachability pass over the store.  Starting from the region
// roots supplied by the SymbolReaper, it visits every binding reachable from
// them and marks the regions and symbols it encounters as live.  Clusters
// that are never visited are dead; removeDeadBindings() drops them.
class removeDeadBindingsWorker :
  public ClusterAnalysis<removeDeadBindingsWorker> {
  // Symbolic regions whose symbols were not known live when first seen;
  // re-examined by UpdatePostponed() after each worklist run.
  SmallVector<const SymbolicRegion*, 12> Postponed;
  SymbolReaper &SymReaper;
  // Stack frame we are pruning at; used to decide which CXXThisRegions are
  // still live (current frame or any parent frame).
  const StackFrameContext *CurrentLCtx;

public:
  removeDeadBindingsWorker(RegionStoreManager &rm,
                           ProgramStateManager &stateMgr,
                           RegionBindingsRef b, SymbolReaper &symReaper,
                           const StackFrameContext *LCtx)
    : ClusterAnalysis<removeDeadBindingsWorker>(rm, stateMgr, b),
      SymReaper(symReaper), CurrentLCtx(LCtx) {}

  // Called by ClusterAnalysis.
  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C);
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  using ClusterAnalysis<removeDeadBindingsWorker>::VisitCluster;

  using ClusterAnalysis::AddToWorkList;

  // Enqueue the base region of 'R' (with its cluster, if any).
  bool AddToWorkList(const MemRegion *R);

  // Re-check postponed symbolic regions; returns true if any became live
  // and were enqueued, meaning another worklist run is needed.
  bool UpdatePostponed();
  // Mark everything reachable from the value 'V' as live.
  void VisitBinding(SVal V);
};
}
2420 
2421 bool removeDeadBindingsWorker::AddToWorkList(const MemRegion *R) {
2422   const MemRegion *BaseR = R->getBaseRegion();
2423   return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
2424 }
2425 
2426 void removeDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR,
2427                                                    const ClusterBindings &C) {
2428 
2429   if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) {
2430     if (SymReaper.isLive(VR))
2431       AddToWorkList(baseR, &C);
2432 
2433     return;
2434   }
2435 
2436   if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) {
2437     if (SymReaper.isLive(SR->getSymbol()))
2438       AddToWorkList(SR, &C);
2439     else
2440       Postponed.push_back(SR);
2441 
2442     return;
2443   }
2444 
2445   if (isa<NonStaticGlobalSpaceRegion>(baseR)) {
2446     AddToWorkList(baseR, &C);
2447     return;
2448   }
2449 
2450   // CXXThisRegion in the current or parent location context is live.
2451   if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) {
2452     const StackArgumentsSpaceRegion *StackReg =
2453       cast<StackArgumentsSpaceRegion>(TR->getSuperRegion());
2454     const StackFrameContext *RegCtx = StackReg->getStackFrame();
2455     if (CurrentLCtx &&
2456         (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx)))
2457       AddToWorkList(TR, &C);
2458   }
2459 }
2460 
2461 void removeDeadBindingsWorker::VisitCluster(const MemRegion *baseR,
2462                                             const ClusterBindings *C) {
2463   if (!C)
2464     return;
2465 
2466   // Mark the symbol for any SymbolicRegion with live bindings as live itself.
2467   // This means we should continue to track that symbol.
2468   if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR))
2469     SymReaper.markLive(SymR->getSymbol());
2470 
2471   for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) {
2472     // Element index of a binding key is live.
2473     SymReaper.markElementIndicesLive(I.getKey().getRegion());
2474 
2475     VisitBinding(I.getData());
2476   }
2477 }
2478 
2479 void removeDeadBindingsWorker::VisitBinding(SVal V) {
2480   // Is it a LazyCompoundVal?  All referenced regions are live as well.
2481   if (Optional<nonloc::LazyCompoundVal> LCS =
2482           V.getAs<nonloc::LazyCompoundVal>()) {
2483 
2484     const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS);
2485 
2486     for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(),
2487                                                         E = Vals.end();
2488          I != E; ++I)
2489       VisitBinding(*I);
2490 
2491     return;
2492   }
2493 
2494   // If V is a region, then add it to the worklist.
2495   if (const MemRegion *R = V.getAsRegion()) {
2496     AddToWorkList(R);
2497     SymReaper.markLive(R);
2498 
2499     // All regions captured by a block are also live.
2500     if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) {
2501       BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(),
2502                                                 E = BR->referenced_vars_end();
2503       for ( ; I != E; ++I)
2504         AddToWorkList(I.getCapturedRegion());
2505     }
2506   }
2507 
2508 
2509   // Update the set of live symbols.
2510   for (SymExpr::symbol_iterator SI = V.symbol_begin(), SE = V.symbol_end();
2511        SI!=SE; ++SI)
2512     SymReaper.markLive(*SI);
2513 }
2514 
2515 bool removeDeadBindingsWorker::UpdatePostponed() {
2516   // See if any postponed SymbolicRegions are actually live now, after
2517   // having done a scan.
2518   bool changed = false;
2519 
2520   for (SmallVectorImpl<const SymbolicRegion*>::iterator
2521         I = Postponed.begin(), E = Postponed.end() ; I != E ; ++I) {
2522     if (const SymbolicRegion *SR = *I) {
2523       if (SymReaper.isLive(SR->getSymbol())) {
2524         changed |= AddToWorkList(SR);
2525         *I = nullptr;
2526       }
2527     }
2528   }
2529 
2530   return changed;
2531 }
2532 
// Prune the store: compute the set of clusters reachable from the symbol
// reaper's region roots, then drop every unreachable cluster and report the
// symbols it referenced as possibly dead.
StoreRef RegionStoreManager::removeDeadBindings(Store store,
                                                const StackFrameContext *LCtx,
                                                SymbolReaper& SymReaper) {
  RegionBindingsRef B = getRegionBindings(store);
  removeDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx);
  W.GenerateClusters();

  // Enqueue the region roots onto the worklist.
  for (SymbolReaper::region_iterator I = SymReaper.region_begin(),
       E = SymReaper.region_end(); I != E; ++I) {
    W.AddToWorkList(*I);
  }

  // Iterate to a fixed point: a scan can mark postponed symbols live, which
  // in turn makes more clusters reachable on the next run.
  do W.RunWorkList(); while (W.UpdatePostponed());

  // We have now scanned the store, marking reachable regions and symbols
  // as live.  We now remove all the regions that are dead from the store
  // as well as update DSymbols with the set symbols that are now dead.
  for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
    const MemRegion *Base = I.getKey();

    // If the cluster has been visited, we know the region has been marked.
    if (W.isVisited(Base))
      continue;

    // Remove the dead entry.  (Removing from 'B' while iterating is safe:
    // 'B' is an immutable map, so the iterator walks the original snapshot.)
    B = B.remove(Base);

    // The symbol behind a dead symbolic region may itself now be dead.
    if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Base))
      SymReaper.maybeDead(SymR->getSymbol());

    // Mark all non-live symbols that this binding references as dead.
    const ClusterBindings &Cluster = I.getData();
    for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
         CI != CE; ++CI) {
      SVal X = CI.getData();
      SymExpr::symbol_iterator SI = X.symbol_begin(), SE = X.symbol_end();
      for (; SI != SE; ++SI)
        SymReaper.maybeDead(*SI);
    }
  }

  return StoreRef(B.asStore(), *this);
}
2577 
2578 //===----------------------------------------------------------------------===//
2579 // Utility methods.
2580 //===----------------------------------------------------------------------===//
2581 
2582 void RegionStoreManager::print(Store store, raw_ostream &OS,
2583                                const char* nl, const char *sep) {
2584   RegionBindingsRef B = getRegionBindings(store);
2585   OS << "Store (direct and default bindings), "
2586      << B.asStore()
2587      << " :" << nl;
2588   B.dump(OS, nl);
2589 }
2590