//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a basic region store model. The model is field-sensitive,
// but it assumes nothing about the shape of the heap, so recursive data
// structures are largely ignored; in effect this is a 1-limited analysis.
// Parameter pointers are assumed not to alias one another, and the objects
// they point to are created lazily.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Attr.h"
#include "clang/AST/CharUnits.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/AnalysisContext.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/raw_ostream.h"
#include <utility>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Representation of binding keys.
//===----------------------------------------------------------------------===//

namespace {
class BindingKey {
public:
  enum Kind { Default = 0x0, Direct = 0x1 };
private:
  enum { Symbolic = 0x2 };

  llvm::PointerIntPair<const MemRegion *, 2> P;
  uint64_t Data;

  /// Create a key for a binding to region \p r, which has a symbolic offset
  /// from region \p Base.
  explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
    : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
    assert(r && Base && "Must have known regions.");
    assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
  }

  /// Create a key for a binding at \p offset from base region \p r.
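  ///
  /// For illustration (informal, hypothetical declaration): given
  /// \code
  ///   struct S { int a; int b; } s;
  /// \endcode
  /// a direct binding to `s.b` is keyed roughly as (base region of `s`,
  /// bit offset of `b` within `S`, Direct), assuming the offset is concrete.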
60 explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k) 61 : P(r, k), Data(offset) { 62 assert(r && "Must have known regions."); 63 assert(getOffset() == offset && "Failed to store offset"); 64 assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r)) && "Not a base"); 65 } 66 public: 67 68 bool isDirect() const { return P.getInt() & Direct; } 69 bool hasSymbolicOffset() const { return P.getInt() & Symbolic; } 70 71 const MemRegion *getRegion() const { return P.getPointer(); } 72 uint64_t getOffset() const { 73 assert(!hasSymbolicOffset()); 74 return Data; 75 } 76 77 const SubRegion *getConcreteOffsetRegion() const { 78 assert(hasSymbolicOffset()); 79 return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data)); 80 } 81 82 const MemRegion *getBaseRegion() const { 83 if (hasSymbolicOffset()) 84 return getConcreteOffsetRegion()->getBaseRegion(); 85 return getRegion()->getBaseRegion(); 86 } 87 88 void Profile(llvm::FoldingSetNodeID& ID) const { 89 ID.AddPointer(P.getOpaqueValue()); 90 ID.AddInteger(Data); 91 } 92 93 static BindingKey Make(const MemRegion *R, Kind k); 94 95 bool operator<(const BindingKey &X) const { 96 if (P.getOpaqueValue() < X.P.getOpaqueValue()) 97 return true; 98 if (P.getOpaqueValue() > X.P.getOpaqueValue()) 99 return false; 100 return Data < X.Data; 101 } 102 103 bool operator==(const BindingKey &X) const { 104 return P.getOpaqueValue() == X.P.getOpaqueValue() && 105 Data == X.Data; 106 } 107 108 void dump() const; 109 }; 110 } // end anonymous namespace 111 112 BindingKey BindingKey::Make(const MemRegion *R, Kind k) { 113 const RegionOffset &RO = R->getAsOffset(); 114 if (RO.hasSymbolicOffset()) 115 return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k); 116 117 return BindingKey(RO.getRegion(), RO.getOffset(), k); 118 } 119 120 namespace llvm { 121 static inline 122 raw_ostream &operator<<(raw_ostream &os, BindingKey K) { 123 os << '(' << K.getRegion(); 124 if (!K.hasSymbolicOffset()) 125 os << ',' << K.getOffset(); 126 os << ',' << (K.isDirect() ? "direct" : "default") 127 << ')'; 128 return os; 129 } 130 131 template <typename T> struct isPodLike; 132 template <> struct isPodLike<BindingKey> { 133 static const bool value = true; 134 }; 135 } // end llvm namespace 136 137 LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; } 138 139 //===----------------------------------------------------------------------===// 140 // Actual Store type. 
//===----------------------------------------------------------------------===//

typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
typedef std::pair<BindingKey, SVal> BindingPair;

typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
        RegionBindings;

namespace {
class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
                                 ClusterBindings> {
  ClusterBindings::Factory *CBFactory;

public:
  typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
          ParentTy;

  RegionBindingsRef(ClusterBindings::Factory &CBFactory,
                    const RegionBindings::TreeTy *T,
                    RegionBindings::TreeTy::Factory *F)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F),
        CBFactory(&CBFactory) {}

  RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P),
        CBFactory(&CBFactory) {}

  RegionBindingsRef add(key_type_ref K, data_type_ref D) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D),
                             *CBFactory);
  }

  RegionBindingsRef remove(key_type_ref K) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K),
                             *CBFactory);
  }

  RegionBindingsRef addBinding(BindingKey K, SVal V) const;

  RegionBindingsRef addBinding(const MemRegion *R,
                               BindingKey::Kind k, SVal V) const;

  const SVal *lookup(BindingKey K) const;
  const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
  using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;

  RegionBindingsRef removeBinding(BindingKey K);

  RegionBindingsRef removeBinding(const MemRegion *R,
                                  BindingKey::Kind k);

  RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
  }

  Optional<SVal> getDirectBinding(const MemRegion *R) const;

  /// getDefaultBinding - Returns the default binding, if any, associated
  /// with a region and its subregions.
  Optional<SVal> getDefaultBinding(const MemRegion *R) const;

  /// Return the internal tree as a Store.
205 Store asStore() const { 206 return asImmutableMap().getRootWithoutRetain(); 207 } 208 209 void dump(raw_ostream &OS, const char *nl) const { 210 for (iterator I = begin(), E = end(); I != E; ++I) { 211 const ClusterBindings &Cluster = I.getData(); 212 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 213 CI != CE; ++CI) { 214 OS << ' ' << CI.getKey() << " : " << CI.getData() << nl; 215 } 216 OS << nl; 217 } 218 } 219 220 LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); } 221 }; 222 } // end anonymous namespace 223 224 typedef const RegionBindingsRef& RegionBindingsConstRef; 225 226 Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const { 227 return Optional<SVal>::create(lookup(R, BindingKey::Direct)); 228 } 229 230 Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const { 231 if (R->isBoundable()) 232 if (const TypedValueRegion *TR = dyn_cast<TypedValueRegion>(R)) 233 if (TR->getValueType()->isUnionType()) 234 return UnknownVal(); 235 236 return Optional<SVal>::create(lookup(R, BindingKey::Default)); 237 } 238 239 RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const { 240 const MemRegion *Base = K.getBaseRegion(); 241 242 const ClusterBindings *ExistingCluster = lookup(Base); 243 ClusterBindings Cluster = 244 (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap()); 245 246 ClusterBindings NewCluster = CBFactory->add(Cluster, K, V); 247 return add(Base, NewCluster); 248 } 249 250 251 RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R, 252 BindingKey::Kind k, 253 SVal V) const { 254 return addBinding(BindingKey::Make(R, k), V); 255 } 256 257 const SVal *RegionBindingsRef::lookup(BindingKey K) const { 258 const ClusterBindings *Cluster = lookup(K.getBaseRegion()); 259 if (!Cluster) 260 return nullptr; 261 return Cluster->lookup(K); 262 } 263 264 const SVal *RegionBindingsRef::lookup(const MemRegion *R, 265 BindingKey::Kind k) const { 266 return lookup(BindingKey::Make(R, k)); 267 } 268 269 RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) { 270 const MemRegion *Base = K.getBaseRegion(); 271 const ClusterBindings *Cluster = lookup(Base); 272 if (!Cluster) 273 return *this; 274 275 ClusterBindings NewCluster = CBFactory->remove(*Cluster, K); 276 if (NewCluster.isEmpty()) 277 return remove(Base); 278 return add(Base, NewCluster); 279 } 280 281 RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R, 282 BindingKey::Kind k){ 283 return removeBinding(BindingKey::Make(R, k)); 284 } 285 286 //===----------------------------------------------------------------------===// 287 // Fine-grained control of RegionStoreManager. 288 //===----------------------------------------------------------------------===// 289 290 namespace { 291 struct minimal_features_tag {}; 292 struct maximal_features_tag {}; 293 294 class RegionStoreFeatures { 295 bool SupportsFields; 296 public: 297 RegionStoreFeatures(minimal_features_tag) : 298 SupportsFields(false) {} 299 300 RegionStoreFeatures(maximal_features_tag) : 301 SupportsFields(true) {} 302 303 void enableFields(bool t) { SupportsFields = t; } 304 305 bool supportsFields() const { return SupportsFields; } 306 }; 307 } 308 309 //===----------------------------------------------------------------------===// 310 // Main RegionStore logic. 
311 //===----------------------------------------------------------------------===// 312 313 namespace { 314 class invalidateRegionsWorker; 315 316 class RegionStoreManager : public StoreManager { 317 public: 318 const RegionStoreFeatures Features; 319 320 RegionBindings::Factory RBFactory; 321 mutable ClusterBindings::Factory CBFactory; 322 323 typedef std::vector<SVal> SValListTy; 324 private: 325 typedef llvm::DenseMap<const LazyCompoundValData *, 326 SValListTy> LazyBindingsMapTy; 327 LazyBindingsMapTy LazyBindingsMap; 328 329 /// The largest number of fields a struct can have and still be 330 /// considered "small". 331 /// 332 /// This is currently used to decide whether or not it is worth "forcing" a 333 /// LazyCompoundVal on bind. 334 /// 335 /// This is controlled by 'region-store-small-struct-limit' option. 336 /// To disable all small-struct-dependent behavior, set the option to "0". 337 unsigned SmallStructLimit; 338 339 /// \brief A helper used to populate the work list with the given set of 340 /// regions. 341 void populateWorkList(invalidateRegionsWorker &W, 342 ArrayRef<SVal> Values, 343 InvalidatedRegions *TopLevelRegions); 344 345 public: 346 RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f) 347 : StoreManager(mgr), Features(f), 348 RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()), 349 SmallStructLimit(0) { 350 if (SubEngine *Eng = StateMgr.getOwningEngine()) { 351 AnalyzerOptions &Options = Eng->getAnalysisManager().options; 352 SmallStructLimit = 353 Options.getOptionAsInteger("region-store-small-struct-limit", 2); 354 } 355 } 356 357 358 /// setImplicitDefaultValue - Set the default binding for the provided 359 /// MemRegion to the value implicitly defined for compound literals when 360 /// the value is not specified. 361 RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B, 362 const MemRegion *R, QualType T); 363 364 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 365 /// type. 'Array' represents the lvalue of the array being decayed 366 /// to a pointer, and the returned SVal represents the decayed 367 /// version of that lvalue (i.e., a pointer to the first element of 368 /// the array). This is called by ExprEngine when evaluating 369 /// casts from arrays to pointers. 370 SVal ArrayToPointer(Loc Array, QualType ElementTy) override; 371 372 StoreRef getInitialStore(const LocationContext *InitLoc) override { 373 return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this); 374 } 375 376 //===-------------------------------------------------------------------===// 377 // Binding values to regions. 378 //===-------------------------------------------------------------------===// 379 RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K, 380 const Expr *Ex, 381 unsigned Count, 382 const LocationContext *LCtx, 383 RegionBindingsRef B, 384 InvalidatedRegions *Invalidated); 385 386 StoreRef invalidateRegions(Store store, 387 ArrayRef<SVal> Values, 388 const Expr *E, unsigned Count, 389 const LocationContext *LCtx, 390 const CallEvent *Call, 391 InvalidatedSymbols &IS, 392 RegionAndSymbolInvalidationTraits &ITraits, 393 InvalidatedRegions *Invalidated, 394 InvalidatedRegions *InvalidatedTopLevel) override; 395 396 bool scanReachableSymbols(Store S, const MemRegion *R, 397 ScanReachableSymbols &Callbacks) override; 398 399 RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B, 400 const SubRegion *R); 401 402 public: // Part of public interface to class. 
403 404 StoreRef Bind(Store store, Loc LV, SVal V) override { 405 return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this); 406 } 407 408 RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V); 409 410 // BindDefault is only used to initialize a region with a default value. 411 StoreRef BindDefault(Store store, const MemRegion *R, SVal V) override { 412 RegionBindingsRef B = getRegionBindings(store); 413 assert(!B.lookup(R, BindingKey::Direct)); 414 415 BindingKey Key = BindingKey::Make(R, BindingKey::Default); 416 if (B.lookup(Key)) { 417 const SubRegion *SR = cast<SubRegion>(R); 418 assert(SR->getAsOffset().getOffset() == 419 SR->getSuperRegion()->getAsOffset().getOffset() && 420 "A default value must come from a super-region"); 421 B = removeSubRegionBindings(B, SR); 422 } else { 423 B = B.addBinding(Key, V); 424 } 425 426 return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this); 427 } 428 429 /// Attempt to extract the fields of \p LCV and bind them to the struct region 430 /// \p R. 431 /// 432 /// This path is used when it seems advantageous to "force" loading the values 433 /// within a LazyCompoundVal to bind memberwise to the struct region, rather 434 /// than using a Default binding at the base of the entire region. This is a 435 /// heuristic attempting to avoid building long chains of LazyCompoundVals. 436 /// 437 /// \returns The updated store bindings, or \c None if binding non-lazily 438 /// would be too expensive. 439 Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B, 440 const TypedValueRegion *R, 441 const RecordDecl *RD, 442 nonloc::LazyCompoundVal LCV); 443 444 /// BindStruct - Bind a compound value to a structure. 445 RegionBindingsRef bindStruct(RegionBindingsConstRef B, 446 const TypedValueRegion* R, SVal V); 447 448 /// BindVector - Bind a compound value to a vector. 449 RegionBindingsRef bindVector(RegionBindingsConstRef B, 450 const TypedValueRegion* R, SVal V); 451 452 RegionBindingsRef bindArray(RegionBindingsConstRef B, 453 const TypedValueRegion* R, 454 SVal V); 455 456 /// Clears out all bindings in the given region and assigns a new value 457 /// as a Default binding. 458 RegionBindingsRef bindAggregate(RegionBindingsConstRef B, 459 const TypedRegion *R, 460 SVal DefaultVal); 461 462 /// \brief Create a new store with the specified binding removed. 463 /// \param ST the original store, that is the basis for the new store. 464 /// \param L the location whose binding should be removed. 465 StoreRef killBinding(Store ST, Loc L) override; 466 467 void incrementReferenceCount(Store store) override { 468 getRegionBindings(store).manualRetain(); 469 } 470 471 /// If the StoreManager supports it, decrement the reference count of 472 /// the specified Store object. If the reference count hits 0, the memory 473 /// associated with the object is recycled. 474 void decrementReferenceCount(Store store) override { 475 getRegionBindings(store).manualRelease(); 476 } 477 478 bool includedInBindings(Store store, const MemRegion *region) const override; 479 480 /// \brief Return the value bound to specified location in a given state. 
481 /// 482 /// The high level logic for this method is this: 483 /// getBinding (L) 484 /// if L has binding 485 /// return L's binding 486 /// else if L is in killset 487 /// return unknown 488 /// else 489 /// if L is on stack or heap 490 /// return undefined 491 /// else 492 /// return symbolic 493 SVal getBinding(Store S, Loc L, QualType T) override { 494 return getBinding(getRegionBindings(S), L, T); 495 } 496 497 Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override { 498 RegionBindingsRef B = getRegionBindings(S); 499 // Default bindings are always applied over a base region so look up the 500 // base region's default binding, otherwise the lookup will fail when R 501 // is at an offset from R->getBaseRegion(). 502 return B.getDefaultBinding(R->getBaseRegion()); 503 } 504 505 SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType()); 506 507 SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R); 508 509 SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R); 510 511 SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R); 512 513 SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R); 514 515 SVal getBindingForLazySymbol(const TypedValueRegion *R); 516 517 SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 518 const TypedValueRegion *R, 519 QualType Ty); 520 521 SVal getLazyBinding(const SubRegion *LazyBindingRegion, 522 RegionBindingsRef LazyBinding); 523 524 /// Get bindings for the values in a struct and return a CompoundVal, used 525 /// when doing struct copy: 526 /// struct s x, y; 527 /// x = y; 528 /// y's value is retrieved by this method. 529 SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R); 530 SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R); 531 NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R); 532 533 /// Used to lazily generate derived symbols for bindings that are defined 534 /// implicitly by default bindings in a super region. 535 /// 536 /// Note that callers may need to specially handle LazyCompoundVals, which 537 /// are returned as is in case the caller needs to treat them differently. 538 Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 539 const MemRegion *superR, 540 const TypedValueRegion *R, 541 QualType Ty); 542 543 /// Get the state and region whose binding this region \p R corresponds to. 544 /// 545 /// If there is no lazy binding for \p R, the returned value will have a null 546 /// \c second. Note that a null pointer can represents a valid Store. 547 std::pair<Store, const SubRegion *> 548 findLazyBinding(RegionBindingsConstRef B, const SubRegion *R, 549 const SubRegion *originalRegion); 550 551 /// Returns the cached set of interesting SVals contained within a lazy 552 /// binding. 553 /// 554 /// The precise value of "interesting" is determined for the purposes of 555 /// RegionStore's internal analysis. It must always contain all regions and 556 /// symbols, but may omit constants and other kinds of SVal. 557 const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV); 558 559 //===------------------------------------------------------------------===// 560 // State pruning. 561 //===------------------------------------------------------------------===// 562 563 /// removeDeadBindings - Scans the RegionStore of 'state' for dead values. 564 /// It returns a new Store with these values removed. 
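  ///
  /// Minimal illustrative sketch (hypothetical code; 'use' is a placeholder):
  /// \code
  ///   void f() {
  ///     int x = 1;   // a binding for 'x' is added to the store
  ///     use(x);      // last live use of 'x'
  ///     // ...past this point, removeDeadBindings may prune 'x's binding
  ///   }
  /// \endcode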
565 StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx, 566 SymbolReaper& SymReaper) override; 567 568 //===------------------------------------------------------------------===// 569 // Region "extents". 570 //===------------------------------------------------------------------===// 571 572 // FIXME: This method will soon be eliminated; see the note in Store.h. 573 DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state, 574 const MemRegion* R, 575 QualType EleTy) override; 576 577 //===------------------------------------------------------------------===// 578 // Utility methods. 579 //===------------------------------------------------------------------===// 580 581 RegionBindingsRef getRegionBindings(Store store) const { 582 return RegionBindingsRef(CBFactory, 583 static_cast<const RegionBindings::TreeTy*>(store), 584 RBFactory.getTreeFactory()); 585 } 586 587 void print(Store store, raw_ostream &Out, const char* nl, 588 const char *sep) override; 589 590 void iterBindings(Store store, BindingsHandler& f) override { 591 RegionBindingsRef B = getRegionBindings(store); 592 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 593 const ClusterBindings &Cluster = I.getData(); 594 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 595 CI != CE; ++CI) { 596 const BindingKey &K = CI.getKey(); 597 if (!K.isDirect()) 598 continue; 599 if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) { 600 // FIXME: Possibly incorporate the offset? 601 if (!f.HandleBinding(*this, store, R, CI.getData())) 602 return; 603 } 604 } 605 } 606 } 607 }; 608 609 } // end anonymous namespace 610 611 //===----------------------------------------------------------------------===// 612 // RegionStore creation. 613 //===----------------------------------------------------------------------===// 614 615 std::unique_ptr<StoreManager> 616 ento::CreateRegionStoreManager(ProgramStateManager &StMgr) { 617 RegionStoreFeatures F = maximal_features_tag(); 618 return llvm::make_unique<RegionStoreManager>(StMgr, F); 619 } 620 621 std::unique_ptr<StoreManager> 622 ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) { 623 RegionStoreFeatures F = minimal_features_tag(); 624 F.enableFields(true); 625 return llvm::make_unique<RegionStoreManager>(StMgr, F); 626 } 627 628 629 //===----------------------------------------------------------------------===// 630 // Region Cluster analysis. 631 //===----------------------------------------------------------------------===// 632 633 namespace { 634 /// Used to determine which global regions are automatically included in the 635 /// initial worklist of a ClusterAnalysis. 636 enum GlobalsFilterKind { 637 /// Don't include any global regions. 638 GFK_None, 639 /// Only include system globals. 640 GFK_SystemOnly, 641 /// Include all global regions. 
642 GFK_All 643 }; 644 645 template <typename DERIVED> 646 class ClusterAnalysis { 647 protected: 648 typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap; 649 typedef const MemRegion * WorkListElement; 650 typedef SmallVector<WorkListElement, 10> WorkList; 651 652 llvm::SmallPtrSet<const ClusterBindings *, 16> Visited; 653 654 WorkList WL; 655 656 RegionStoreManager &RM; 657 ASTContext &Ctx; 658 SValBuilder &svalBuilder; 659 660 RegionBindingsRef B; 661 662 663 protected: 664 const ClusterBindings *getCluster(const MemRegion *R) { 665 return B.lookup(R); 666 } 667 668 /// Returns true if all clusters in the given memspace should be initially 669 /// included in the cluster analysis. Subclasses may provide their 670 /// own implementation. 671 bool includeEntireMemorySpace(const MemRegion *Base) { 672 return false; 673 } 674 675 public: 676 ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr, 677 RegionBindingsRef b) 678 : RM(rm), Ctx(StateMgr.getContext()), 679 svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {} 680 681 RegionBindingsRef getRegionBindings() const { return B; } 682 683 bool isVisited(const MemRegion *R) { 684 return Visited.count(getCluster(R)); 685 } 686 687 void GenerateClusters() { 688 // Scan the entire set of bindings and record the region clusters. 689 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); 690 RI != RE; ++RI){ 691 const MemRegion *Base = RI.getKey(); 692 693 const ClusterBindings &Cluster = RI.getData(); 694 assert(!Cluster.isEmpty() && "Empty clusters should be removed"); 695 static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster); 696 697 // If the base's memspace should be entirely invalidated, add the cluster 698 // to the workspace up front. 699 if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base)) 700 AddToWorkList(WorkListElement(Base), &Cluster); 701 } 702 } 703 704 bool AddToWorkList(WorkListElement E, const ClusterBindings *C) { 705 if (C && !Visited.insert(C).second) 706 return false; 707 WL.push_back(E); 708 return true; 709 } 710 711 bool AddToWorkList(const MemRegion *R) { 712 return static_cast<DERIVED*>(this)->AddToWorkList(R); 713 } 714 715 void RunWorkList() { 716 while (!WL.empty()) { 717 WorkListElement E = WL.pop_back_val(); 718 const MemRegion *BaseR = E; 719 720 static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR)); 721 } 722 } 723 724 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {} 725 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {} 726 727 void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C, 728 bool Flag) { 729 static_cast<DERIVED*>(this)->VisitCluster(BaseR, C); 730 } 731 }; 732 } 733 734 //===----------------------------------------------------------------------===// 735 // Binding invalidation. 
736 //===----------------------------------------------------------------------===// 737 738 bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R, 739 ScanReachableSymbols &Callbacks) { 740 assert(R == R->getBaseRegion() && "Should only be called for base regions"); 741 RegionBindingsRef B = getRegionBindings(S); 742 const ClusterBindings *Cluster = B.lookup(R); 743 744 if (!Cluster) 745 return true; 746 747 for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end(); 748 RI != RE; ++RI) { 749 if (!Callbacks.scan(RI.getData())) 750 return false; 751 } 752 753 return true; 754 } 755 756 static inline bool isUnionField(const FieldRegion *FR) { 757 return FR->getDecl()->getParent()->isUnion(); 758 } 759 760 typedef SmallVector<const FieldDecl *, 8> FieldVector; 761 762 static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) { 763 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 764 765 const MemRegion *Base = K.getConcreteOffsetRegion(); 766 const MemRegion *R = K.getRegion(); 767 768 while (R != Base) { 769 if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) 770 if (!isUnionField(FR)) 771 Fields.push_back(FR->getDecl()); 772 773 R = cast<SubRegion>(R)->getSuperRegion(); 774 } 775 } 776 777 static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) { 778 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 779 780 if (Fields.empty()) 781 return true; 782 783 FieldVector FieldsInBindingKey; 784 getSymbolicOffsetFields(K, FieldsInBindingKey); 785 786 ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size(); 787 if (Delta >= 0) 788 return std::equal(FieldsInBindingKey.begin() + Delta, 789 FieldsInBindingKey.end(), 790 Fields.begin()); 791 else 792 return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(), 793 Fields.begin() - Delta); 794 } 795 796 /// Collects all bindings in \p Cluster that may refer to bindings within 797 /// \p Top. 798 /// 799 /// Each binding is a pair whose \c first is the key (a BindingKey) and whose 800 /// \c second is the value (an SVal). 801 /// 802 /// The \p IncludeAllDefaultBindings parameter specifies whether to include 803 /// default bindings that may extend beyond \p Top itself, e.g. if \p Top is 804 /// an aggregate within a larger aggregate with a default binding. 805 static void 806 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 807 SValBuilder &SVB, const ClusterBindings &Cluster, 808 const SubRegion *Top, BindingKey TopKey, 809 bool IncludeAllDefaultBindings) { 810 FieldVector FieldsInSymbolicSubregions; 811 if (TopKey.hasSymbolicOffset()) { 812 getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions); 813 Top = cast<SubRegion>(TopKey.getConcreteOffsetRegion()); 814 TopKey = BindingKey::Make(Top, BindingKey::Default); 815 } 816 817 // Find the length (in bits) of the region being invalidated. 818 uint64_t Length = UINT64_MAX; 819 SVal Extent = Top->getExtent(SVB); 820 if (Optional<nonloc::ConcreteInt> ExtentCI = 821 Extent.getAs<nonloc::ConcreteInt>()) { 822 const llvm::APSInt &ExtentInt = ExtentCI->getValue(); 823 assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned()); 824 // Extents are in bytes but region offsets are in bits. Be careful! 
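    // For example (assuming 8-bit chars): a 4-byte extent covers bit offsets
    // [0, 32), since 4 * 8 == 32.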
825 Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth(); 826 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) { 827 if (FR->getDecl()->isBitField()) 828 Length = FR->getDecl()->getBitWidthValue(SVB.getContext()); 829 } 830 831 for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end(); 832 I != E; ++I) { 833 BindingKey NextKey = I.getKey(); 834 if (NextKey.getRegion() == TopKey.getRegion()) { 835 // FIXME: This doesn't catch the case where we're really invalidating a 836 // region with a symbolic offset. Example: 837 // R: points[i].y 838 // Next: points[0].x 839 840 if (NextKey.getOffset() > TopKey.getOffset() && 841 NextKey.getOffset() - TopKey.getOffset() < Length) { 842 // Case 1: The next binding is inside the region we're invalidating. 843 // Include it. 844 Bindings.push_back(*I); 845 846 } else if (NextKey.getOffset() == TopKey.getOffset()) { 847 // Case 2: The next binding is at the same offset as the region we're 848 // invalidating. In this case, we need to leave default bindings alone, 849 // since they may be providing a default value for a regions beyond what 850 // we're invalidating. 851 // FIXME: This is probably incorrect; consider invalidating an outer 852 // struct whose first field is bound to a LazyCompoundVal. 853 if (IncludeAllDefaultBindings || NextKey.isDirect()) 854 Bindings.push_back(*I); 855 } 856 857 } else if (NextKey.hasSymbolicOffset()) { 858 const MemRegion *Base = NextKey.getConcreteOffsetRegion(); 859 if (Top->isSubRegionOf(Base)) { 860 // Case 3: The next key is symbolic and we just changed something within 861 // its concrete region. We don't know if the binding is still valid, so 862 // we'll be conservative and include it. 863 if (IncludeAllDefaultBindings || NextKey.isDirect()) 864 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 865 Bindings.push_back(*I); 866 } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) { 867 // Case 4: The next key is symbolic, but we changed a known 868 // super-region. In this case the binding is certainly included. 869 if (Top == Base || BaseSR->isSubRegionOf(Top)) 870 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 871 Bindings.push_back(*I); 872 } 873 } 874 } 875 } 876 877 static void 878 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 879 SValBuilder &SVB, const ClusterBindings &Cluster, 880 const SubRegion *Top, bool IncludeAllDefaultBindings) { 881 collectSubRegionBindings(Bindings, SVB, Cluster, Top, 882 BindingKey::Make(Top, BindingKey::Default), 883 IncludeAllDefaultBindings); 884 } 885 886 RegionBindingsRef 887 RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B, 888 const SubRegion *Top) { 889 BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default); 890 const MemRegion *ClusterHead = TopKey.getBaseRegion(); 891 892 if (Top == ClusterHead) { 893 // We can remove an entire cluster's bindings all in one go. 894 return B.remove(Top); 895 } 896 897 const ClusterBindings *Cluster = B.lookup(ClusterHead); 898 if (!Cluster) { 899 // If we're invalidating a region with a symbolic offset, we need to make 900 // sure we don't treat the base region as uninitialized anymore. 
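    // For example (informal): invalidating 'a[i]' with an unknown index 'i'
    // should leave 'a' with a default UnknownVal binding, so a later read of
    // 'a[0]' is not flagged as an uninitialized value.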
901 if (TopKey.hasSymbolicOffset()) { 902 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 903 return B.addBinding(Concrete, BindingKey::Default, UnknownVal()); 904 } 905 return B; 906 } 907 908 SmallVector<BindingPair, 32> Bindings; 909 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey, 910 /*IncludeAllDefaultBindings=*/false); 911 912 ClusterBindingsRef Result(*Cluster, CBFactory); 913 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 914 E = Bindings.end(); 915 I != E; ++I) 916 Result = Result.remove(I->first); 917 918 // If we're invalidating a region with a symbolic offset, we need to make sure 919 // we don't treat the base region as uninitialized anymore. 920 // FIXME: This isn't very precise; see the example in 921 // collectSubRegionBindings. 922 if (TopKey.hasSymbolicOffset()) { 923 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 924 Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default), 925 UnknownVal()); 926 } 927 928 if (Result.isEmpty()) 929 return B.remove(ClusterHead); 930 return B.add(ClusterHead, Result.asImmutableMap()); 931 } 932 933 namespace { 934 class invalidateRegionsWorker : public ClusterAnalysis<invalidateRegionsWorker> 935 { 936 const Expr *Ex; 937 unsigned Count; 938 const LocationContext *LCtx; 939 InvalidatedSymbols &IS; 940 RegionAndSymbolInvalidationTraits &ITraits; 941 StoreManager::InvalidatedRegions *Regions; 942 GlobalsFilterKind GlobalsFilter; 943 public: 944 invalidateRegionsWorker(RegionStoreManager &rm, 945 ProgramStateManager &stateMgr, 946 RegionBindingsRef b, 947 const Expr *ex, unsigned count, 948 const LocationContext *lctx, 949 InvalidatedSymbols &is, 950 RegionAndSymbolInvalidationTraits &ITraitsIn, 951 StoreManager::InvalidatedRegions *r, 952 GlobalsFilterKind GFK) 953 : ClusterAnalysis<invalidateRegionsWorker>(rm, stateMgr, b), 954 Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r), 955 GlobalsFilter(GFK) {} 956 957 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 958 void VisitBinding(SVal V); 959 960 using ClusterAnalysis::AddToWorkList; 961 962 bool AddToWorkList(const MemRegion *R); 963 964 /// Returns true if all clusters in the memory space for \p Base should be 965 /// be invalidated. 966 bool includeEntireMemorySpace(const MemRegion *Base); 967 968 /// Returns true if the memory space of the given region is one of the global 969 /// regions specially included at the start of invalidation. 970 bool isInitiallyIncludedGlobalRegion(const MemRegion *R); 971 }; 972 } 973 974 bool invalidateRegionsWorker::AddToWorkList(const MemRegion *R) { 975 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 976 R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 977 const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion(); 978 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 979 } 980 981 void invalidateRegionsWorker::VisitBinding(SVal V) { 982 // A symbol? Mark it touched by the invalidation. 983 if (SymbolRef Sym = V.getAsSymbol()) 984 IS.insert(Sym); 985 986 if (const MemRegion *R = V.getAsRegion()) { 987 AddToWorkList(R); 988 return; 989 } 990 991 // Is it a LazyCompoundVal? All references get invalidated as well. 
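  // For illustration (informal): after 'struct S s2 = s1;', 's2' is typically
  // bound to a LazyCompoundVal snapshotting 's1', so invalidating such a value
  // must also visit every region and symbol reachable from that snapshot.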
992 if (Optional<nonloc::LazyCompoundVal> LCS = 993 V.getAs<nonloc::LazyCompoundVal>()) { 994 995 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 996 997 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 998 E = Vals.end(); 999 I != E; ++I) 1000 VisitBinding(*I); 1001 1002 return; 1003 } 1004 } 1005 1006 void invalidateRegionsWorker::VisitCluster(const MemRegion *baseR, 1007 const ClusterBindings *C) { 1008 1009 bool PreserveRegionsContents = 1010 ITraits.hasTrait(baseR, 1011 RegionAndSymbolInvalidationTraits::TK_PreserveContents); 1012 1013 if (C) { 1014 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) 1015 VisitBinding(I.getData()); 1016 1017 // Invalidate regions contents. 1018 if (!PreserveRegionsContents) 1019 B = B.remove(baseR); 1020 } 1021 1022 // BlockDataRegion? If so, invalidate captured variables that are passed 1023 // by reference. 1024 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) { 1025 for (BlockDataRegion::referenced_vars_iterator 1026 BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ; 1027 BI != BE; ++BI) { 1028 const VarRegion *VR = BI.getCapturedRegion(); 1029 const VarDecl *VD = VR->getDecl(); 1030 if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) { 1031 AddToWorkList(VR); 1032 } 1033 else if (Loc::isLocType(VR->getValueType())) { 1034 // Map the current bindings to a Store to retrieve the value 1035 // of the binding. If that binding itself is a region, we should 1036 // invalidate that region. This is because a block may capture 1037 // a pointer value, but the thing pointed by that pointer may 1038 // get invalidated. 1039 SVal V = RM.getBinding(B, loc::MemRegionVal(VR)); 1040 if (Optional<Loc> L = V.getAs<Loc>()) { 1041 if (const MemRegion *LR = L->getAsRegion()) 1042 AddToWorkList(LR); 1043 } 1044 } 1045 } 1046 return; 1047 } 1048 1049 // Symbolic region? 1050 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) 1051 IS.insert(SR->getSymbol()); 1052 1053 // Nothing else should be done in the case when we preserve regions context. 1054 if (PreserveRegionsContents) 1055 return; 1056 1057 // Otherwise, we have a normal data region. Record that we touched the region. 1058 if (Regions) 1059 Regions->push_back(baseR); 1060 1061 if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) { 1062 // Invalidate the region by setting its default value to 1063 // conjured symbol. The type of the symbol is irrelevant. 1064 DefinedOrUnknownSVal V = 1065 svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count); 1066 B = B.addBinding(baseR, BindingKey::Default, V); 1067 return; 1068 } 1069 1070 if (!baseR->isBoundable()) 1071 return; 1072 1073 const TypedValueRegion *TR = cast<TypedValueRegion>(baseR); 1074 QualType T = TR->getValueType(); 1075 1076 if (isInitiallyIncludedGlobalRegion(baseR)) { 1077 // If the region is a global and we are invalidating all globals, 1078 // erasing the entry is good enough. This causes all globals to be lazily 1079 // symbolicated from the same base symbol. 1080 return; 1081 } 1082 1083 if (T->isStructureOrClassType()) { 1084 // Invalidate the region by setting its default value to 1085 // conjured symbol. The type of the symbol is irrelevant. 
1086 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1087 Ctx.IntTy, Count); 1088 B = B.addBinding(baseR, BindingKey::Default, V); 1089 return; 1090 } 1091 1092 if (const ArrayType *AT = Ctx.getAsArrayType(T)) { 1093 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 1094 baseR, 1095 RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 1096 1097 if (doNotInvalidateSuperRegion) { 1098 // We are not doing blank invalidation of the whole array region so we 1099 // have to manually invalidate each elements. 1100 Optional<uint64_t> NumElements; 1101 1102 // Compute lower and upper offsets for region within array. 1103 if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT)) 1104 NumElements = CAT->getSize().getZExtValue(); 1105 if (!NumElements) // We are not dealing with a constant size array 1106 goto conjure_default; 1107 QualType ElementTy = AT->getElementType(); 1108 uint64_t ElemSize = Ctx.getTypeSize(ElementTy); 1109 const RegionOffset &RO = baseR->getAsOffset(); 1110 const MemRegion *SuperR = baseR->getBaseRegion(); 1111 if (RO.hasSymbolicOffset()) { 1112 // If base region has a symbolic offset, 1113 // we revert to invalidating the super region. 1114 if (SuperR) 1115 AddToWorkList(SuperR); 1116 goto conjure_default; 1117 } 1118 1119 uint64_t LowerOffset = RO.getOffset(); 1120 uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize; 1121 bool UpperOverflow = UpperOffset < LowerOffset; 1122 1123 // Invalidate regions which are within array boundaries, 1124 // or have a symbolic offset. 1125 if (!SuperR) 1126 goto conjure_default; 1127 1128 const ClusterBindings *C = B.lookup(SuperR); 1129 if (!C) 1130 goto conjure_default; 1131 1132 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; 1133 ++I) { 1134 const BindingKey &BK = I.getKey(); 1135 Optional<uint64_t> ROffset = 1136 BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset(); 1137 1138 // Check offset is not symbolic and within array's boundaries. 1139 // Handles arrays of 0 elements and of 0-sized elements as well. 1140 if (!ROffset || 1141 ((*ROffset >= LowerOffset && *ROffset < UpperOffset) || 1142 (UpperOverflow && 1143 (*ROffset >= LowerOffset || *ROffset < UpperOffset)) || 1144 (LowerOffset == UpperOffset && *ROffset == LowerOffset))) { 1145 B = B.removeBinding(I.getKey()); 1146 // Bound symbolic regions need to be invalidated for dead symbol 1147 // detection. 1148 SVal V = I.getData(); 1149 const MemRegion *R = V.getAsRegion(); 1150 if (R && isa<SymbolicRegion>(R)) 1151 VisitBinding(V); 1152 } 1153 } 1154 } 1155 conjure_default: 1156 // Set the default value of the array to conjured symbol. 
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                     AT->getElementType(), Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                        T,Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(baseR, BindingKey::Direct, V);
}

bool invalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
    const MemRegion *R) {
  switch (GlobalsFilter) {
  case GFK_None:
    return false;
  case GFK_SystemOnly:
    return isa<GlobalSystemSpaceRegion>(R->getMemorySpace());
  case GFK_All:
    return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace());
  }

  llvm_unreachable("unknown globals filter");
}

bool invalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
  if (isInitiallyIncludedGlobalRegion(Base))
    return true;

  const MemSpaceRegion *MemSpace = Base->getMemorySpace();
  return ITraits.hasTrait(MemSpace,
                          RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
}

RegionBindingsRef
RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated) {
  // Bind the globals memory space to a new symbol that we will use to derive
  // the bindings for all globals.
  const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
  SVal V = svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx,
                                        /* type does not matter */ Ctx.IntTy,
                                        Count);

  B = B.removeBinding(GS)
       .addBinding(BindingKey::Make(GS, BindingKey::Default), V);

  // Even if there are no bindings in the global scope, we still need to
  // record that we touched it.
  if (Invalidated)
    Invalidated->push_back(GS);

  return B;
}

void RegionStoreManager::populateWorkList(invalidateRegionsWorker &W,
                                          ArrayRef<SVal> Values,
                                          InvalidatedRegions *TopLevelRegions) {
  for (ArrayRef<SVal>::iterator I = Values.begin(),
                                E = Values.end(); I != E; ++I) {
    SVal V = *I;
    if (Optional<nonloc::LazyCompoundVal> LCS =
            V.getAs<nonloc::LazyCompoundVal>()) {

      const SValListTy &Vals = getInterestingValues(*LCS);

      for (SValListTy::const_iterator I = Vals.begin(),
                                      E = Vals.end(); I != E; ++I) {
        // Note: these are non-top-level regions, so they are not recorded in
        // TopLevelRegions (see the direct-region case below).
1233 if (const MemRegion *R = (*I).getAsRegion()) 1234 W.AddToWorkList(R); 1235 } 1236 continue; 1237 } 1238 1239 if (const MemRegion *R = V.getAsRegion()) { 1240 if (TopLevelRegions) 1241 TopLevelRegions->push_back(R); 1242 W.AddToWorkList(R); 1243 continue; 1244 } 1245 } 1246 } 1247 1248 StoreRef 1249 RegionStoreManager::invalidateRegions(Store store, 1250 ArrayRef<SVal> Values, 1251 const Expr *Ex, unsigned Count, 1252 const LocationContext *LCtx, 1253 const CallEvent *Call, 1254 InvalidatedSymbols &IS, 1255 RegionAndSymbolInvalidationTraits &ITraits, 1256 InvalidatedRegions *TopLevelRegions, 1257 InvalidatedRegions *Invalidated) { 1258 GlobalsFilterKind GlobalsFilter; 1259 if (Call) { 1260 if (Call->isInSystemHeader()) 1261 GlobalsFilter = GFK_SystemOnly; 1262 else 1263 GlobalsFilter = GFK_All; 1264 } else { 1265 GlobalsFilter = GFK_None; 1266 } 1267 1268 RegionBindingsRef B = getRegionBindings(store); 1269 invalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits, 1270 Invalidated, GlobalsFilter); 1271 1272 // Scan the bindings and generate the clusters. 1273 W.GenerateClusters(); 1274 1275 // Add the regions to the worklist. 1276 populateWorkList(W, Values, TopLevelRegions); 1277 1278 W.RunWorkList(); 1279 1280 // Return the new bindings. 1281 B = W.getRegionBindings(); 1282 1283 // For calls, determine which global regions should be invalidated and 1284 // invalidate them. (Note that function-static and immutable globals are never 1285 // invalidated by this.) 1286 // TODO: This could possibly be more precise with modules. 1287 switch (GlobalsFilter) { 1288 case GFK_All: 1289 B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind, 1290 Ex, Count, LCtx, B, Invalidated); 1291 // FALLTHROUGH 1292 case GFK_SystemOnly: 1293 B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind, 1294 Ex, Count, LCtx, B, Invalidated); 1295 // FALLTHROUGH 1296 case GFK_None: 1297 break; 1298 } 1299 1300 return StoreRef(B.asStore(), *this); 1301 } 1302 1303 //===----------------------------------------------------------------------===// 1304 // Extents for regions. 1305 //===----------------------------------------------------------------------===// 1306 1307 DefinedOrUnknownSVal 1308 RegionStoreManager::getSizeInElements(ProgramStateRef state, 1309 const MemRegion *R, 1310 QualType EleTy) { 1311 SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder); 1312 const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size); 1313 if (!SizeInt) 1314 return UnknownVal(); 1315 1316 CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue()); 1317 1318 if (Ctx.getAsVariableArrayType(EleTy)) { 1319 // FIXME: We need to track extra state to properly record the size 1320 // of VLAs. Returning UnknownVal here, however, is a stop-gap so that 1321 // we don't have a divide-by-zero below. 1322 return UnknownVal(); 1323 } 1324 1325 CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy); 1326 1327 // If a variable is reinterpreted as a type that doesn't fit into a larger 1328 // type evenly, round it down. 1329 // This is a signed value, since it's used in arithmetic with signed indices. 1330 return svalBuilder.makeIntVal(RegionSize / EleSize, false); 1331 } 1332 1333 //===----------------------------------------------------------------------===// 1334 // Location and region casting. 1335 //===----------------------------------------------------------------------===// 1336 1337 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 1338 /// type. 
'Array' represents the lvalue of the array being decayed 1339 /// to a pointer, and the returned SVal represents the decayed 1340 /// version of that lvalue (i.e., a pointer to the first element of 1341 /// the array). This is called by ExprEngine when evaluating casts 1342 /// from arrays to pointers. 1343 SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) { 1344 if (Array.getAs<loc::ConcreteInt>()) 1345 return Array; 1346 1347 if (!Array.getAs<loc::MemRegionVal>()) 1348 return UnknownVal(); 1349 1350 const SubRegion *R = 1351 cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion()); 1352 NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex(); 1353 return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx)); 1354 } 1355 1356 //===----------------------------------------------------------------------===// 1357 // Loading values from regions. 1358 //===----------------------------------------------------------------------===// 1359 1360 SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) { 1361 assert(!L.getAs<UnknownVal>() && "location unknown"); 1362 assert(!L.getAs<UndefinedVal>() && "location undefined"); 1363 1364 // For access to concrete addresses, return UnknownVal. Checks 1365 // for null dereferences (and similar errors) are done by checkers, not 1366 // the Store. 1367 // FIXME: We can consider lazily symbolicating such memory, but we really 1368 // should defer this when we can reason easily about symbolicating arrays 1369 // of bytes. 1370 if (L.getAs<loc::ConcreteInt>()) { 1371 return UnknownVal(); 1372 } 1373 if (!L.getAs<loc::MemRegionVal>()) { 1374 return UnknownVal(); 1375 } 1376 1377 const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion(); 1378 1379 if (isa<BlockDataRegion>(MR)) { 1380 return UnknownVal(); 1381 } 1382 1383 if (isa<AllocaRegion>(MR) || 1384 isa<SymbolicRegion>(MR) || 1385 isa<CodeTextRegion>(MR)) { 1386 if (T.isNull()) { 1387 if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR)) 1388 T = TR->getLocationType(); 1389 else { 1390 const SymbolicRegion *SR = cast<SymbolicRegion>(MR); 1391 T = SR->getSymbol()->getType(); 1392 } 1393 } 1394 MR = GetElementZeroRegion(cast<SubRegion>(MR), T); 1395 } 1396 1397 // FIXME: Perhaps this method should just take a 'const MemRegion*' argument 1398 // instead of 'Loc', and have the other Loc cases handled at a higher level. 1399 const TypedValueRegion *R = cast<TypedValueRegion>(MR); 1400 QualType RTy = R->getValueType(); 1401 1402 // FIXME: we do not yet model the parts of a complex type, so treat the 1403 // whole thing as "unknown". 1404 if (RTy->isAnyComplexType()) 1405 return UnknownVal(); 1406 1407 // FIXME: We should eventually handle funny addressing. e.g.: 1408 // 1409 // int x = ...; 1410 // int *p = &x; 1411 // char *q = (char*) p; 1412 // char c = *q; // returns the first byte of 'x'. 1413 // 1414 // Such funny addressing will occur due to layering of regions. 1415 if (RTy->isStructureOrClassType()) 1416 return getBindingForStruct(B, R); 1417 1418 // FIXME: Handle unions. 1419 if (RTy->isUnionType()) 1420 return createLazyBinding(B, R); 1421 1422 if (RTy->isArrayType()) { 1423 if (RTy->isConstantArrayType()) 1424 return getBindingForArray(B, R); 1425 else 1426 return UnknownVal(); 1427 } 1428 1429 // FIXME: handle Vector types. 
1430 if (RTy->isVectorType()) 1431 return UnknownVal(); 1432 1433 if (const FieldRegion* FR = dyn_cast<FieldRegion>(R)) 1434 return CastRetrievedVal(getBindingForField(B, FR), FR, T, false); 1435 1436 if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) { 1437 // FIXME: Here we actually perform an implicit conversion from the loaded 1438 // value to the element type. Eventually we want to compose these values 1439 // more intelligently. For example, an 'element' can encompass multiple 1440 // bound regions (e.g., several bound bytes), or could be a subset of 1441 // a larger value. 1442 return CastRetrievedVal(getBindingForElement(B, ER), ER, T, false); 1443 } 1444 1445 if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) { 1446 // FIXME: Here we actually perform an implicit conversion from the loaded 1447 // value to the ivar type. What we should model is stores to ivars 1448 // that blow past the extent of the ivar. If the address of the ivar is 1449 // reinterpretted, it is possible we stored a different value that could 1450 // fit within the ivar. Either we need to cast these when storing them 1451 // or reinterpret them lazily (as we do here). 1452 return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T, false); 1453 } 1454 1455 if (const VarRegion *VR = dyn_cast<VarRegion>(R)) { 1456 // FIXME: Here we actually perform an implicit conversion from the loaded 1457 // value to the variable type. What we should model is stores to variables 1458 // that blow past the extent of the variable. If the address of the 1459 // variable is reinterpretted, it is possible we stored a different value 1460 // that could fit within the variable. Either we need to cast these when 1461 // storing them or reinterpret them lazily (as we do here). 1462 return CastRetrievedVal(getBindingForVar(B, VR), VR, T, false); 1463 } 1464 1465 const SVal *V = B.lookup(R, BindingKey::Direct); 1466 1467 // Check if the region has a binding. 1468 if (V) 1469 return *V; 1470 1471 // The location does not have a bound value. This means that it has 1472 // the value it had upon its creation and/or entry to the analyzed 1473 // function/method. These are either symbolic values or 'undefined'. 1474 if (R->hasStackNonParametersStorage()) { 1475 // All stack variables are considered to have undefined values 1476 // upon creation. All heap allocated blocks are considered to 1477 // have undefined values as well unless they are explicitly bound 1478 // to specific values. 1479 return UndefinedVal(); 1480 } 1481 1482 // All other values are symbolic. 1483 return svalBuilder.getRegionValueSymbolVal(R); 1484 } 1485 1486 static QualType getUnderlyingType(const SubRegion *R) { 1487 QualType RegionTy; 1488 if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R)) 1489 RegionTy = TVR->getValueType(); 1490 1491 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) 1492 RegionTy = SR->getSymbol()->getType(); 1493 1494 return RegionTy; 1495 } 1496 1497 /// Checks to see if store \p B has a lazy binding for region \p R. 1498 /// 1499 /// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected 1500 /// if there are additional bindings within \p R. 1501 /// 1502 /// Note that unlike RegionStoreManager::findLazyBinding, this will not search 1503 /// for lazy bindings for super-regions of \p R. 
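///
/// For illustration (informal, hypothetical code; 'S', 'f', and 'use' are
/// placeholders):
/// \code
///   S s1, s2;
///   s2 = s1;     // 's2' gets a default LazyCompoundVal over 's1's bindings
///   use(s2.f);   // may reuse that lazy binding if the types match and 's2'
///                // has no other (sub)bindings
/// \endcode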
1504 static Optional<nonloc::LazyCompoundVal> 1505 getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B, 1506 const SubRegion *R, bool AllowSubregionBindings) { 1507 Optional<SVal> V = B.getDefaultBinding(R); 1508 if (!V) 1509 return None; 1510 1511 Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>(); 1512 if (!LCV) 1513 return None; 1514 1515 // If the LCV is for a subregion, the types might not match, and we shouldn't 1516 // reuse the binding. 1517 QualType RegionTy = getUnderlyingType(R); 1518 if (!RegionTy.isNull() && 1519 !RegionTy->isVoidPointerType()) { 1520 QualType SourceRegionTy = LCV->getRegion()->getValueType(); 1521 if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy)) 1522 return None; 1523 } 1524 1525 if (!AllowSubregionBindings) { 1526 // If there are any other bindings within this region, we shouldn't reuse 1527 // the top-level binding. 1528 SmallVector<BindingPair, 16> Bindings; 1529 collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R, 1530 /*IncludeAllDefaultBindings=*/true); 1531 if (Bindings.size() > 1) 1532 return None; 1533 } 1534 1535 return *LCV; 1536 } 1537 1538 1539 std::pair<Store, const SubRegion *> 1540 RegionStoreManager::findLazyBinding(RegionBindingsConstRef B, 1541 const SubRegion *R, 1542 const SubRegion *originalRegion) { 1543 if (originalRegion != R) { 1544 if (Optional<nonloc::LazyCompoundVal> V = 1545 getExistingLazyBinding(svalBuilder, B, R, true)) 1546 return std::make_pair(V->getStore(), V->getRegion()); 1547 } 1548 1549 typedef std::pair<Store, const SubRegion *> StoreRegionPair; 1550 StoreRegionPair Result = StoreRegionPair(); 1551 1552 if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) { 1553 Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()), 1554 originalRegion); 1555 1556 if (Result.second) 1557 Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second); 1558 1559 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) { 1560 Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()), 1561 originalRegion); 1562 1563 if (Result.second) 1564 Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second); 1565 1566 } else if (const CXXBaseObjectRegion *BaseReg = 1567 dyn_cast<CXXBaseObjectRegion>(R)) { 1568 // C++ base object region is another kind of region that we should blast 1569 // through to look for lazy compound value. It is like a field region. 1570 Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()), 1571 originalRegion); 1572 1573 if (Result.second) 1574 Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg, 1575 Result.second); 1576 } 1577 1578 return Result; 1579 } 1580 1581 SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B, 1582 const ElementRegion* R) { 1583 // We do not currently model bindings of the CompoundLiteralregion. 1584 if (isa<CompoundLiteralRegion>(R->getBaseRegion())) 1585 return UnknownVal(); 1586 1587 // Check if the region has a binding. 1588 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1589 return *V; 1590 1591 const MemRegion* superR = R->getSuperRegion(); 1592 1593 // Check if the region is an element region of a string literal. 
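  // For illustration: with 'char c = "ab"[1];' this path yields 'b', while an
  // index equal to the literal's length (e.g. "ab"[2]) yields the implicit
  // '\0' terminator.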
1594   if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) {
1595     // FIXME: Handle loads from strings where the literal is treated as
1596     // an integer, e.g., *((unsigned int*)"hello")
1597     QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType();
1598     if (!Ctx.hasSameUnqualifiedType(T, R->getElementType()))
1599       return UnknownVal();
1600
1601     const StringLiteral *Str = StrR->getStringLiteral();
1602     SVal Idx = R->getIndex();
1603     if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) {
1604       int64_t i = CI->getValue().getSExtValue();
1605       // Abort on string underrun. This can happen with arbitrary callers of
1606       // getBindingForElement().
1607       if (i < 0)
1608         return UndefinedVal();
1609       int64_t length = Str->getLength();
1610       // Technically, only i == length is guaranteed to be null.
1611       // However, such overflows should be caught before reaching this point;
1612       // the only time such an access would be made is if a string literal was
1613       // used to initialize a larger array.
1614       char c = (i >= length) ? '\0' : Str->getCodeUnit(i);
1615       return svalBuilder.makeIntVal(c, T);
1616     }
1617   }
1618
1619   // Check for loads from a code text region. For such loads, just give up.
1620   if (isa<CodeTextRegion>(superR))
1621     return UnknownVal();
1622
1623   // Handle the case where we are indexing into a larger scalar object.
1624   // For example, this handles:
1625   //   int x = ...
1626   //   char *y = &x;
1627   //   return *y;
1628   // FIXME: This is a hack, and doesn't do anything really intelligent yet.
1629   const RegionRawOffset &O = R->getAsArrayOffset();
1630
1631   // If we cannot reason about the offset, return an unknown value.
1632   if (!O.getRegion())
1633     return UnknownVal();
1634
1635   if (const TypedValueRegion *baseR =
1636         dyn_cast_or_null<TypedValueRegion>(O.getRegion())) {
1637     QualType baseT = baseR->getValueType();
1638     if (baseT->isScalarType()) {
1639       QualType elemT = R->getElementType();
1640       if (elemT->isScalarType()) {
1641         if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) {
1642           if (const Optional<SVal> &V = B.getDirectBinding(superR)) {
1643             if (SymbolRef parentSym = V->getAsSymbol())
1644               return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);
1645
1646             if (V->isUnknownOrUndef())
1647               return *V;
1648             // Other cases: give up. We are indexing into a larger object
1649             // that has some value, but we don't know how to handle that yet.
1650             return UnknownVal();
1651           }
1652         }
1653       }
1654     }
1655   }
1656   return getBindingForFieldOrElementCommon(B, R, R->getElementType());
1657 }
1658
1659 SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
1660                                             const FieldRegion* R) {
1661
1662   // Check if the region has a binding.
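  // For illustration (hypothetical code under analysis): after 'p.x = 3;',
  // the FieldRegion for 'p.x' has a direct binding and is returned below;
  // otherwise we fall through to the common field/element lookup path.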
1663 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1664 return *V; 1665 1666 QualType Ty = R->getValueType(); 1667 return getBindingForFieldOrElementCommon(B, R, Ty); 1668 } 1669 1670 Optional<SVal> 1671 RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 1672 const MemRegion *superR, 1673 const TypedValueRegion *R, 1674 QualType Ty) { 1675 1676 if (const Optional<SVal> &D = B.getDefaultBinding(superR)) { 1677 const SVal &val = D.getValue(); 1678 if (SymbolRef parentSym = val.getAsSymbol()) 1679 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1680 1681 if (val.isZeroConstant()) 1682 return svalBuilder.makeZeroVal(Ty); 1683 1684 if (val.isUnknownOrUndef()) 1685 return val; 1686 1687 // Lazy bindings are usually handled through getExistingLazyBinding(). 1688 // We should unify these two code paths at some point. 1689 if (val.getAs<nonloc::LazyCompoundVal>() || 1690 val.getAs<nonloc::CompoundVal>()) 1691 return val; 1692 1693 llvm_unreachable("Unknown default value"); 1694 } 1695 1696 return None; 1697 } 1698 1699 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 1700 RegionBindingsRef LazyBinding) { 1701 SVal Result; 1702 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 1703 Result = getBindingForElement(LazyBinding, ER); 1704 else 1705 Result = getBindingForField(LazyBinding, 1706 cast<FieldRegion>(LazyBindingRegion)); 1707 1708 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1709 // default value for /part/ of an aggregate from a default value for the 1710 // /entire/ aggregate. The most common case of this is when struct Outer 1711 // has as its first member a struct Inner, which is copied in from a stack 1712 // variable. In this case, even if the Outer's default value is symbolic, 0, 1713 // or unknown, it gets overridden by the Inner's default value of undefined. 1714 // 1715 // This is a general problem -- if the Inner is zero-initialized, the Outer 1716 // will now look zero-initialized. The proper way to solve this is with a 1717 // new version of RegionStore that tracks the extent of a binding as well 1718 // as the offset. 1719 // 1720 // This hack only takes care of the undefined case because that can very 1721 // quickly result in a warning. 1722 if (Result.isUndef()) 1723 Result = UnknownVal(); 1724 1725 return Result; 1726 } 1727 1728 SVal 1729 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 1730 const TypedValueRegion *R, 1731 QualType Ty) { 1732 1733 // At this point we have already checked in either getBindingForElement or 1734 // getBindingForField if 'R' has a direct binding. 1735 1736 // Lazy binding? 1737 Store lazyBindingStore = nullptr; 1738 const SubRegion *lazyBindingRegion = nullptr; 1739 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 1740 if (lazyBindingRegion) 1741 return getLazyBinding(lazyBindingRegion, 1742 getRegionBindings(lazyBindingStore)); 1743 1744 // Record whether or not we see a symbolic index. That can completely 1745 // be out of scope of our lookup. 1746 bool hasSymbolicIndex = false; 1747 1748 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1749 // default value for /part/ of an aggregate from a default value for the 1750 // /entire/ aggregate. The most common case of this is when struct Outer 1751 // has as its first member a struct Inner, which is copied in from a stack 1752 // variable. 
In this case, even if the Outer's default value is symbolic, 0, 1753 // or unknown, it gets overridden by the Inner's default value of undefined. 1754 // 1755 // This is a general problem -- if the Inner is zero-initialized, the Outer 1756 // will now look zero-initialized. The proper way to solve this is with a 1757 // new version of RegionStore that tracks the extent of a binding as well 1758 // as the offset. 1759 // 1760 // This hack only takes care of the undefined case because that can very 1761 // quickly result in a warning. 1762 bool hasPartialLazyBinding = false; 1763 1764 const SubRegion *SR = dyn_cast<SubRegion>(R); 1765 while (SR) { 1766 const MemRegion *Base = SR->getSuperRegion(); 1767 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 1768 if (D->getAs<nonloc::LazyCompoundVal>()) { 1769 hasPartialLazyBinding = true; 1770 break; 1771 } 1772 1773 return *D; 1774 } 1775 1776 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 1777 NonLoc index = ER->getIndex(); 1778 if (!index.isConstant()) 1779 hasSymbolicIndex = true; 1780 } 1781 1782 // If our super region is a field or element itself, walk up the region 1783 // hierarchy to see if there is a default value installed in an ancestor. 1784 SR = dyn_cast<SubRegion>(Base); 1785 } 1786 1787 if (R->hasStackNonParametersStorage()) { 1788 if (isa<ElementRegion>(R)) { 1789 // Currently we don't reason specially about Clang-style vectors. Check 1790 // if superR is a vector and if so return Unknown. 1791 if (const TypedValueRegion *typedSuperR = 1792 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 1793 if (typedSuperR->getValueType()->isVectorType()) 1794 return UnknownVal(); 1795 } 1796 } 1797 1798 // FIXME: We also need to take ElementRegions with symbolic indexes into 1799 // account. This case handles both directly accessing an ElementRegion 1800 // with a symbolic offset, but also fields within an element with 1801 // a symbolic offset. 1802 if (hasSymbolicIndex) 1803 return UnknownVal(); 1804 1805 if (!hasPartialLazyBinding) 1806 return UndefinedVal(); 1807 } 1808 1809 // All other values are symbolic. 1810 return svalBuilder.getRegionValueSymbolVal(R); 1811 } 1812 1813 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 1814 const ObjCIvarRegion* R) { 1815 // Check if the region has a binding. 1816 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1817 return *V; 1818 1819 const MemRegion *superR = R->getSuperRegion(); 1820 1821 // Check if the super region has a default binding. 1822 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 1823 if (SymbolRef parentSym = V->getAsSymbol()) 1824 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1825 1826 // Other cases: give up. 1827 return UnknownVal(); 1828 } 1829 1830 return getBindingForLazySymbol(R); 1831 } 1832 1833 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 1834 const VarRegion *R) { 1835 1836 // Check if the region has a binding. 1837 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1838 return *V; 1839 1840 // Lazily derive a value for the VarRegion. 1841 const VarDecl *VD = R->getDecl(); 1842 const MemSpaceRegion *MS = R->getMemorySpace(); 1843 1844 // Arguments are always symbolic. 1845 if (isa<StackArgumentsSpaceRegion>(MS)) 1846 return svalBuilder.getRegionValueSymbolVal(R); 1847 1848 // Is 'VD' declared constant? If so, retrieve the constant value. 
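  // Illustrative example (hypothetical): for 'const int Answer = 42;', reads
  // of 'Answer' are folded to the constant 42 via its initializer.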
1849 if (VD->getType().isConstQualified()) 1850 if (const Expr *Init = VD->getInit()) 1851 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1852 return *V; 1853 1854 // This must come after the check for constants because closure-captured 1855 // constant variables may appear in UnknownSpaceRegion. 1856 if (isa<UnknownSpaceRegion>(MS)) 1857 return svalBuilder.getRegionValueSymbolVal(R); 1858 1859 if (isa<GlobalsSpaceRegion>(MS)) { 1860 QualType T = VD->getType(); 1861 1862 // Function-scoped static variables are default-initialized to 0; if they 1863 // have an initializer, it would have been processed by now. 1864 // FIXME: This is only true when we're starting analysis from main(). 1865 // We're losing a lot of coverage here. 1866 if (isa<StaticGlobalSpaceRegion>(MS)) 1867 return svalBuilder.makeZeroVal(T); 1868 1869 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 1870 assert(!V->getAs<nonloc::LazyCompoundVal>()); 1871 return V.getValue(); 1872 } 1873 1874 return svalBuilder.getRegionValueSymbolVal(R); 1875 } 1876 1877 return UndefinedVal(); 1878 } 1879 1880 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 1881 // All other values are symbolic. 1882 return svalBuilder.getRegionValueSymbolVal(R); 1883 } 1884 1885 const RegionStoreManager::SValListTy & 1886 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 1887 // First, check the cache. 1888 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 1889 if (I != LazyBindingsMap.end()) 1890 return I->second; 1891 1892 // If we don't have a list of values cached, start constructing it. 1893 SValListTy List; 1894 1895 const SubRegion *LazyR = LCV.getRegion(); 1896 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 1897 1898 // If this region had /no/ bindings at the time, there are no interesting 1899 // values to return. 
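  // For example (illustrative): a lazy snapshot taken of a completely
  // untouched local struct has no cluster at all, so the list stays empty.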
1900 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 1901 if (!Cluster) 1902 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 1903 1904 SmallVector<BindingPair, 32> Bindings; 1905 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 1906 /*IncludeAllDefaultBindings=*/true); 1907 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 1908 E = Bindings.end(); 1909 I != E; ++I) { 1910 SVal V = I->second; 1911 if (V.isUnknownOrUndef() || V.isConstant()) 1912 continue; 1913 1914 if (Optional<nonloc::LazyCompoundVal> InnerLCV = 1915 V.getAs<nonloc::LazyCompoundVal>()) { 1916 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 1917 List.insert(List.end(), InnerList.begin(), InnerList.end()); 1918 continue; 1919 } 1920 1921 List.push_back(V); 1922 } 1923 1924 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 1925 } 1926 1927 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 1928 const TypedValueRegion *R) { 1929 if (Optional<nonloc::LazyCompoundVal> V = 1930 getExistingLazyBinding(svalBuilder, B, R, false)) 1931 return *V; 1932 1933 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 1934 } 1935 1936 static bool isRecordEmpty(const RecordDecl *RD) { 1937 if (!RD->field_empty()) 1938 return false; 1939 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 1940 return CRD->getNumBases() == 0; 1941 return true; 1942 } 1943 1944 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 1945 const TypedValueRegion *R) { 1946 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 1947 if (!RD->getDefinition() || isRecordEmpty(RD)) 1948 return UnknownVal(); 1949 1950 return createLazyBinding(B, R); 1951 } 1952 1953 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 1954 const TypedValueRegion *R) { 1955 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 1956 "Only constant array types can have compound bindings."); 1957 1958 return createLazyBinding(B, R); 1959 } 1960 1961 bool RegionStoreManager::includedInBindings(Store store, 1962 const MemRegion *region) const { 1963 RegionBindingsRef B = getRegionBindings(store); 1964 region = region->getBaseRegion(); 1965 1966 // Quick path: if the base is the head of a cluster, the region is live. 1967 if (B.lookup(region)) 1968 return true; 1969 1970 // Slow path: if the region is the VALUE of any binding, it is live. 1971 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 1972 const ClusterBindings &Cluster = RI.getData(); 1973 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 1974 CI != CE; ++CI) { 1975 const SVal &D = CI.getData(); 1976 if (const MemRegion *R = D.getAsRegion()) 1977 if (R->getBaseRegion() == region) 1978 return true; 1979 } 1980 } 1981 1982 return false; 1983 } 1984 1985 //===----------------------------------------------------------------------===// 1986 // Binding values to regions. 
1987 //===----------------------------------------------------------------------===// 1988 1989 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) { 1990 if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>()) 1991 if (const MemRegion* R = LV->getRegion()) 1992 return StoreRef(getRegionBindings(ST).removeBinding(R) 1993 .asImmutableMap() 1994 .getRootWithoutRetain(), 1995 *this); 1996 1997 return StoreRef(ST, *this); 1998 } 1999 2000 RegionBindingsRef 2001 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) { 2002 if (L.getAs<loc::ConcreteInt>()) 2003 return B; 2004 2005 // If we get here, the location should be a region. 2006 const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion(); 2007 2008 // Check if the region is a struct region. 2009 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) { 2010 QualType Ty = TR->getValueType(); 2011 if (Ty->isArrayType()) 2012 return bindArray(B, TR, V); 2013 if (Ty->isStructureOrClassType()) 2014 return bindStruct(B, TR, V); 2015 if (Ty->isVectorType()) 2016 return bindVector(B, TR, V); 2017 if (Ty->isUnionType()) 2018 return bindAggregate(B, TR, V); 2019 } 2020 2021 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) { 2022 // Binding directly to a symbolic region should be treated as binding 2023 // to element 0. 2024 QualType T = SR->getSymbol()->getType(); 2025 if (T->isAnyPointerType() || T->isReferenceType()) 2026 T = T->getPointeeType(); 2027 2028 R = GetElementZeroRegion(SR, T); 2029 } 2030 2031 // Clear out bindings that may overlap with this binding. 2032 RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R)); 2033 return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V); 2034 } 2035 2036 RegionBindingsRef 2037 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B, 2038 const MemRegion *R, 2039 QualType T) { 2040 SVal V; 2041 2042 if (Loc::isLocType(T)) 2043 V = svalBuilder.makeNull(); 2044 else if (T->isIntegralOrEnumerationType()) 2045 V = svalBuilder.makeZeroVal(T); 2046 else if (T->isStructureOrClassType() || T->isArrayType()) { 2047 // Set the default value to a zero constant when it is a structure 2048 // or array. The type doesn't really matter. 2049 V = svalBuilder.makeZeroVal(Ctx.IntTy); 2050 } 2051 else { 2052 // We can't represent values of this type, but we still need to set a value 2053 // to record that the region has been initialized. 2054 // If this assertion ever fires, a new case should be added above -- we 2055 // should know how to default-initialize any value we can symbolicate. 2056 assert(!SymbolManager::canSymbolicate(T) && "This type is representable"); 2057 V = UnknownVal(); 2058 } 2059 2060 return B.addBinding(R, BindingKey::Default, V); 2061 } 2062 2063 RegionBindingsRef 2064 RegionStoreManager::bindArray(RegionBindingsConstRef B, 2065 const TypedValueRegion* R, 2066 SVal Init) { 2067 2068 const ArrayType *AT =cast<ArrayType>(Ctx.getCanonicalType(R->getValueType())); 2069 QualType ElementTy = AT->getElementType(); 2070 Optional<uint64_t> Size; 2071 2072 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT)) 2073 Size = CAT->getSize().getZExtValue(); 2074 2075 // Check if the init expr is a string literal. 2076 if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) { 2077 const StringRegion *S = cast<StringRegion>(MRV->getRegion()); 2078 2079 // Treat the string as a lazy compound value. 
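    // Illustrative example (hypothetical code under analysis):
    //   char buf[8] = "hi";
    // binds 'buf' to a lazy snapshot of the string literal's region rather
    // than binding each character individually.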
2080     StoreRef store(B.asStore(), *this);
2081     nonloc::LazyCompoundVal LCV = svalBuilder.makeLazyCompoundVal(store, S)
2082                                       .castAs<nonloc::LazyCompoundVal>();
2083     return bindAggregate(B, R, LCV);
2084   }
2085
2086   // Handle lazy compound values.
2087   if (Init.getAs<nonloc::LazyCompoundVal>())
2088     return bindAggregate(B, R, Init);
2089
2090   if (Init.isUnknown())
2091     return bindAggregate(B, R, UnknownVal());
2092
2093   // Remaining case: explicit compound values.
2094   const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2095   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2096   uint64_t i = 0;
2097
2098   RegionBindingsRef NewB(B);
2099
2100   for (; Size.hasValue() ? i < Size.getValue() : true ; ++i, ++VI) {
2101     // The init list might be shorter than the array length.
2102     if (VI == VE)
2103       break;
2104
2105     const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
2106     const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);
2107
2108     if (ElementTy->isStructureOrClassType())
2109       NewB = bindStruct(NewB, ER, *VI);
2110     else if (ElementTy->isArrayType())
2111       NewB = bindArray(NewB, ER, *VI);
2112     else
2113       NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2114   }
2115
2116   // If the init list is shorter than the array length, set the
2117   // array default value.
2118   if (Size.hasValue() && i < Size.getValue())
2119     NewB = setImplicitDefaultValue(NewB, R, ElementTy);
2120
2121   return NewB;
2122 }
2123
2124 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
2125                                                  const TypedValueRegion* R,
2126                                                  SVal V) {
2127   QualType T = R->getValueType();
2128   assert(T->isVectorType());
2129   const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.
2130
2131   // Handle lazy compound values and symbolic values.
2132   if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
2133     return bindAggregate(B, R, V);
2134
2135   // We may get a non-CompoundVal here, either because of imprecise cast logic
2136   // or because we are binding a symbolic struct value. Kill the field values,
2137   // and if the value is symbolic, bind it as a "default" binding.
2138   if (!V.getAs<nonloc::CompoundVal>()) {
2139     return bindAggregate(B, R, UnknownVal());
2140   }
2141
2142   QualType ElemType = VT->getElementType();
2143   nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>();
2144   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2145   unsigned index = 0, numElements = VT->getNumElements();
2146   RegionBindingsRef NewB(B);
2147
2148   for ( ; index != numElements ; ++index) {
2149     if (VI == VE)
2150       break;
2151
2152     NonLoc Idx = svalBuilder.makeArrayIndex(index);
2153     const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx);
2154
2155     if (ElemType->isArrayType())
2156       NewB = bindArray(NewB, ER, *VI);
2157     else if (ElemType->isStructureOrClassType())
2158       NewB = bindStruct(NewB, ER, *VI);
2159     else
2160       NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2161   }
2162   return NewB;
2163 }
2164
2165 Optional<RegionBindingsRef>
2166 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B,
2167                                        const TypedValueRegion *R,
2168                                        const RecordDecl *RD,
2169                                        nonloc::LazyCompoundVal LCV) {
2170   FieldVector Fields;
2171
2172   if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD))
2173     if (Class->getNumBases() != 0 || Class->getNumVBases() != 0)
2174       return None;
2175
2176   for (const auto *FD : RD->fields()) {
2177     if (FD->isUnnamedBitfield())
2178       continue;
2179
2180     // If there are too many fields, or if any of the fields are aggregates,
2181     // just use the LCV as a default binding.
2182     if (Fields.size() == SmallStructLimit)
2183       return None;
2184
2185     QualType Ty = FD->getType();
2186     if (!(Ty->isScalarType() || Ty->isReferenceType()))
2187       return None;
2188
2189     Fields.push_back(FD);
2190   }
2191
2192   RegionBindingsRef NewB = B;
2193
2194   for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I) {
2195     const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion());
2196     SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR);
2197
2198     const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R);
2199     NewB = bind(NewB, loc::MemRegionVal(DestFR), V);
2200   }
2201
2202   return NewB;
2203 }
2204
2205 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B,
2206                                                  const TypedValueRegion* R,
2207                                                  SVal V) {
2208   if (!Features.supportsFields())
2209     return B;
2210
2211   QualType T = R->getValueType();
2212   assert(T->isStructureOrClassType());
2213
2214   const RecordType* RT = T->getAs<RecordType>();
2215   const RecordDecl *RD = RT->getDecl();
2216
2217   if (!RD->isCompleteDefinition())
2218     return B;
2219
2220   // Handle lazy compound values and symbolic values.
2221   if (Optional<nonloc::LazyCompoundVal> LCV =
2222         V.getAs<nonloc::LazyCompoundVal>()) {
2223     if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV))
2224       return *NewB;
2225     return bindAggregate(B, R, V);
2226   }
2227   if (V.getAs<nonloc::SymbolVal>())
2228     return bindAggregate(B, R, V);
2229
2230   // We may get a non-CompoundVal here, either because of imprecise cast logic
2231   // or because we are binding a symbolic struct value. Kill the field values,
2232   // and if the value is symbolic, bind it as a "default" binding.
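  // Illustrative example (added for exposition; hypothetical code under
  // analysis):
  //   struct S { int a; int b; } s = { 1, 2 };
  // Here V is a nonloc::CompoundVal, and each field is bound individually
  // in the loop below.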
2233   if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
2234     return bindAggregate(B, R, UnknownVal());
2235
2236   const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2237   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2238
2239   RecordDecl::field_iterator FI, FE;
2240   RegionBindingsRef NewB(B);
2241
2242   for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2243
2244     if (VI == VE)
2245       break;
2246
2247     // Skip any unnamed bitfields to stay in sync with the initializers.
2248     if (FI->isUnnamedBitfield())
2249       continue;
2250
2251     QualType FTy = FI->getType();
2252     const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);
2253
2254     if (FTy->isArrayType())
2255       NewB = bindArray(NewB, FR, *VI);
2256     else if (FTy->isStructureOrClassType())
2257       NewB = bindStruct(NewB, FR, *VI);
2258     else
2259       NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
2260     ++VI;
2261   }
2262
2263   // There may be fewer values in the initializer list than there are struct fields.
2264   if (FI != FE) {
2265     NewB = NewB.addBinding(R, BindingKey::Default,
2266                            svalBuilder.makeIntVal(0, false));
2267   }
2268
2269   return NewB;
2270 }
2271
2272 RegionBindingsRef
2273 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2274                                   const TypedRegion *R,
2275                                   SVal Val) {
2276   // Remove the old bindings, using 'R' as the root of all regions
2277   // we will invalidate. Then add the new binding.
2278   return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2279 }
2280
2281 //===----------------------------------------------------------------------===//
2282 // State pruning.
2283 //===----------------------------------------------------------------------===//
2284
2285 namespace {
2286 class removeDeadBindingsWorker :
2287   public ClusterAnalysis<removeDeadBindingsWorker> {
2288   SmallVector<const SymbolicRegion*, 12> Postponed;
2289   SymbolReaper &SymReaper;
2290   const StackFrameContext *CurrentLCtx;
2291
2292 public:
2293   removeDeadBindingsWorker(RegionStoreManager &rm,
2294                            ProgramStateManager &stateMgr,
2295                            RegionBindingsRef b, SymbolReaper &symReaper,
2296                            const StackFrameContext *LCtx)
2297     : ClusterAnalysis<removeDeadBindingsWorker>(rm, stateMgr, b),
2298       SymReaper(symReaper), CurrentLCtx(LCtx) {}
2299
2300   // Called by ClusterAnalysis.
2301 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2302 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2303 using ClusterAnalysis<removeDeadBindingsWorker>::VisitCluster; 2304 2305 using ClusterAnalysis::AddToWorkList; 2306 2307 bool AddToWorkList(const MemRegion *R); 2308 2309 bool UpdatePostponed(); 2310 void VisitBinding(SVal V); 2311 }; 2312 } 2313 2314 bool removeDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2315 const MemRegion *BaseR = R->getBaseRegion(); 2316 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2317 } 2318 2319 void removeDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2320 const ClusterBindings &C) { 2321 2322 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2323 if (SymReaper.isLive(VR)) 2324 AddToWorkList(baseR, &C); 2325 2326 return; 2327 } 2328 2329 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2330 if (SymReaper.isLive(SR->getSymbol())) 2331 AddToWorkList(SR, &C); 2332 else 2333 Postponed.push_back(SR); 2334 2335 return; 2336 } 2337 2338 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2339 AddToWorkList(baseR, &C); 2340 return; 2341 } 2342 2343 // CXXThisRegion in the current or parent location context is live. 2344 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2345 const StackArgumentsSpaceRegion *StackReg = 2346 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2347 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2348 if (CurrentLCtx && 2349 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2350 AddToWorkList(TR, &C); 2351 } 2352 } 2353 2354 void removeDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2355 const ClusterBindings *C) { 2356 if (!C) 2357 return; 2358 2359 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2360 // This means we should continue to track that symbol. 2361 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2362 SymReaper.markLive(SymR->getSymbol()); 2363 2364 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2365 // Element index of a binding key is live. 2366 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2367 2368 VisitBinding(I.getData()); 2369 } 2370 } 2371 2372 void removeDeadBindingsWorker::VisitBinding(SVal V) { 2373 // Is it a LazyCompoundVal? All referenced regions are live as well. 2374 if (Optional<nonloc::LazyCompoundVal> LCS = 2375 V.getAs<nonloc::LazyCompoundVal>()) { 2376 2377 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 2378 2379 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 2380 E = Vals.end(); 2381 I != E; ++I) 2382 VisitBinding(*I); 2383 2384 return; 2385 } 2386 2387 // If V is a region, then add it to the worklist. 2388 if (const MemRegion *R = V.getAsRegion()) { 2389 AddToWorkList(R); 2390 SymReaper.markLive(R); 2391 2392 // All regions captured by a block are also live. 2393 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2394 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2395 E = BR->referenced_vars_end(); 2396 for ( ; I != E; ++I) 2397 AddToWorkList(I.getCapturedRegion()); 2398 } 2399 } 2400 2401 2402 // Update the set of live symbols. 
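  // For illustration (hypothetical): if V is the symbolic value '$x + 4',
  // the loop below marks the symbol '$x' as live.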
2403   for (SymExpr::symbol_iterator SI = V.symbol_begin(), SE = V.symbol_end();
2404        SI != SE; ++SI)
2405     SymReaper.markLive(*SI);
2406 }
2407
2408 bool removeDeadBindingsWorker::UpdatePostponed() {
2409   // See if any postponed SymbolicRegions are actually live now, after
2410   // having done a scan.
2411   bool changed = false;
2412
2413   for (SmallVectorImpl<const SymbolicRegion*>::iterator
2414          I = Postponed.begin(), E = Postponed.end() ; I != E ; ++I) {
2415     if (const SymbolicRegion *SR = *I) {
2416       if (SymReaper.isLive(SR->getSymbol())) {
2417         changed |= AddToWorkList(SR);
2418         *I = nullptr;
2419       }
2420     }
2421   }
2422
2423   return changed;
2424 }
2425
2426 StoreRef RegionStoreManager::removeDeadBindings(Store store,
2427                                                 const StackFrameContext *LCtx,
2428                                                 SymbolReaper& SymReaper) {
2429   RegionBindingsRef B = getRegionBindings(store);
2430   removeDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx);
2431   W.GenerateClusters();
2432
2433   // Enqueue the region roots onto the worklist.
2434   for (SymbolReaper::region_iterator I = SymReaper.region_begin(),
2435        E = SymReaper.region_end(); I != E; ++I) {
2436     W.AddToWorkList(*I);
2437   }
2438
2439   do W.RunWorkList(); while (W.UpdatePostponed());
2440
2441   // We have now scanned the store, marking reachable regions and symbols
2442   // as live. Now remove all dead regions from the store and notify the
2443   // SymbolReaper of the symbols that may have become dead as a result.
2444   for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
2445     const MemRegion *Base = I.getKey();
2446
2447     // If the cluster has been visited, we know the region has been marked.
2448     if (W.isVisited(Base))
2449       continue;
2450
2451     // Remove the dead entry.
2452     B = B.remove(Base);
2453
2454     if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Base))
2455       SymReaper.maybeDead(SymR->getSymbol());
2456
2457     // Mark all non-live symbols that this binding references as dead.
2458     const ClusterBindings &Cluster = I.getData();
2459     for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
2460          CI != CE; ++CI) {
2461       SVal X = CI.getData();
2462       SymExpr::symbol_iterator SI = X.symbol_begin(), SE = X.symbol_end();
2463       for (; SI != SE; ++SI)
2464         SymReaper.maybeDead(*SI);
2465     }
2466   }
2467
2468   return StoreRef(B.asStore(), *this);
2469 }
2470
2471 //===----------------------------------------------------------------------===//
2472 // Utility methods.
2473 //===----------------------------------------------------------------------===//
2474
2475 void RegionStoreManager::print(Store store, raw_ostream &OS,
2476                                const char* nl, const char *sep) {
2477   RegionBindingsRef B = getRegionBindings(store);
2478   OS << "Store (direct and default bindings), "
2479      << B.asStore()
2480      << " :" << nl;
2481   B.dump(OS, nl);
2482 }
2483