//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a basic region store model. In this model we have field
// sensitivity, but we assume nothing about the heap shape, so recursive data
// structures are largely ignored. Essentially we perform a 1-limited analysis.
// Parameter pointers are assumed not to alias; pointee objects of parameters
// are created lazily.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Attr.h"
#include "clang/AST/CharUnits.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/raw_ostream.h"
#include <utility>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Representation of binding keys.
//===----------------------------------------------------------------------===//

namespace {
class BindingKey {
public:
  enum Kind { Default = 0x0, Direct = 0x1 };
private:
  enum { Symbolic = 0x2 };

  llvm::PointerIntPair<const MemRegion *, 2> P;
  uint64_t Data;

  /// Create a key for a binding to region \p r, which has a symbolic offset
  /// from region \p Base.
  explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
      : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
    assert(r && Base && "Must have known regions.");
    assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
  }
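  // Example (hypothetical offsets, for illustration only): for a field access
  // like `s.f` where `f` happens to sit 32 bits from the start of `s`, the
  // concrete-offset constructor below builds a key of the form
  // (base region of `s`, offset = 32, Direct or Default).  For an access such
  // as `a[i].f` with a symbolic index `i`, no concrete offset exists, so the
  // constructor above is used instead: the key keeps the accessed region
  // itself, remembers the nearest concrete ancestor region in Data, and sets
  // the Symbolic bit.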
  /// Create a key for a binding at \p offset from base region \p r.
  explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
      : P(r, k), Data(offset) {
    assert(r && "Must have known regions.");
    assert(getOffset() == offset && "Failed to store offset");
    assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r)) &&
           "Not a base");
  }
public:

  bool isDirect() const { return P.getInt() & Direct; }
  bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }

  const MemRegion *getRegion() const { return P.getPointer(); }
  uint64_t getOffset() const {
    assert(!hasSymbolicOffset());
    return Data;
  }

  const SubRegion *getConcreteOffsetRegion() const {
    assert(hasSymbolicOffset());
    return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
  }

  const MemRegion *getBaseRegion() const {
    if (hasSymbolicOffset())
      return getConcreteOffsetRegion()->getBaseRegion();
    return getRegion()->getBaseRegion();
  }

  void Profile(llvm::FoldingSetNodeID& ID) const {
    ID.AddPointer(P.getOpaqueValue());
    ID.AddInteger(Data);
  }

  static BindingKey Make(const MemRegion *R, Kind k);

  bool operator<(const BindingKey &X) const {
    if (P.getOpaqueValue() < X.P.getOpaqueValue())
      return true;
    if (P.getOpaqueValue() > X.P.getOpaqueValue())
      return false;
    return Data < X.Data;
  }

  bool operator==(const BindingKey &X) const {
    return P.getOpaqueValue() == X.P.getOpaqueValue() &&
           Data == X.Data;
  }

  void dump() const;
};
} // end anonymous namespace

BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
  const RegionOffset &RO = R->getAsOffset();
  if (RO.hasSymbolicOffset())
    return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k);

  return BindingKey(RO.getRegion(), RO.getOffset(), k);
}

namespace llvm {
static inline
raw_ostream &operator<<(raw_ostream &os, BindingKey K) {
  os << '(' << K.getRegion();
  if (!K.hasSymbolicOffset())
    os << ',' << K.getOffset();
  os << ',' << (K.isDirect() ? "direct" : "default")
     << ')';
  return os;
}

template <typename T> struct isPodLike;
template <> struct isPodLike<BindingKey> {
  static const bool value = true;
};
} // end llvm namespace

LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; }

//===----------------------------------------------------------------------===//
// Actual Store type.
//===----------------------------------------------------------------------===//

typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
typedef std::pair<BindingKey, SVal> BindingPair;

typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
        RegionBindings;

namespace {
class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
                                                        ClusterBindings> {
  ClusterBindings::Factory *CBFactory;

public:
  typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
          ParentTy;

  RegionBindingsRef(ClusterBindings::Factory &CBFactory,
                    const RegionBindings::TreeTy *T,
                    RegionBindings::TreeTy::Factory *F)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F),
        CBFactory(&CBFactory) {}

  RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P),
        CBFactory(&CBFactory) {}

  RegionBindingsRef add(key_type_ref K, data_type_ref D) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D),
                             *CBFactory);
  }

  RegionBindingsRef remove(key_type_ref K) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K),
                             *CBFactory);
  }

  RegionBindingsRef addBinding(BindingKey K, SVal V) const;

  RegionBindingsRef addBinding(const MemRegion *R,
                               BindingKey::Kind k, SVal V) const;

  const SVal *lookup(BindingKey K) const;
  const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
  using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;

  RegionBindingsRef removeBinding(BindingKey K);

  RegionBindingsRef removeBinding(const MemRegion *R,
                                  BindingKey::Kind k);

  RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
  }

  Optional<SVal> getDirectBinding(const MemRegion *R) const;

  /// getDefaultBinding - Returns an SVal* representing an optional default
  /// binding associated with a region and its subregions.
  Optional<SVal> getDefaultBinding(const MemRegion *R) const;
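  // Note on the layout (an illustration, not part of the original comments):
  // the outer map is keyed by *base* regions, and each value is a "cluster"
  // holding every binding that lives somewhere inside that base region.  For
  // a local `struct S s;`, the bindings for `s.x` and `s.y` both end up in
  // the single cluster keyed by the VarRegion of `s`, which is what makes it
  // cheap to remove or invalidate everything related to `s` at once.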
  /// Return the internal tree as a Store.
  Store asStore() const {
    return asImmutableMap().getRootWithoutRetain();
  }

  void dump(raw_ostream &OS, const char *nl) const {
    for (iterator I = begin(), E = end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        OS << ' ' << CI.getKey() << " : " << CI.getData() << nl;
      }
      OS << nl;
    }
  }

  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); }
};
} // end anonymous namespace

typedef const RegionBindingsRef& RegionBindingsConstRef;

Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
  return Optional<SVal>::create(lookup(R, BindingKey::Direct));
}

Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
  if (R->isBoundable())
    if (const TypedValueRegion *TR = dyn_cast<TypedValueRegion>(R))
      if (TR->getValueType()->isUnionType())
        return UnknownVal();

  return Optional<SVal>::create(lookup(R, BindingKey::Default));
}

RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
  const MemRegion *Base = K.getBaseRegion();

  const ClusterBindings *ExistingCluster = lookup(Base);
  ClusterBindings Cluster =
      (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());

  ClusterBindings NewCluster = CBFactory->add(Cluster, K, V);
  return add(Base, NewCluster);
}


RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
                                                BindingKey::Kind k,
                                                SVal V) const {
  return addBinding(BindingKey::Make(R, k), V);
}

const SVal *RegionBindingsRef::lookup(BindingKey K) const {
  const ClusterBindings *Cluster = lookup(K.getBaseRegion());
  if (!Cluster)
    return nullptr;
  return Cluster->lookup(K);
}

const SVal *RegionBindingsRef::lookup(const MemRegion *R,
                                      BindingKey::Kind k) const {
  return lookup(BindingKey::Make(R, k));
}

RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
  const MemRegion *Base = K.getBaseRegion();
  const ClusterBindings *Cluster = lookup(Base);
  if (!Cluster)
    return *this;

  ClusterBindings NewCluster = CBFactory->remove(*Cluster, K);
  if (NewCluster.isEmpty())
    return remove(Base);
  return add(Base, NewCluster);
}

RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
                                                   BindingKey::Kind k){
  return removeBinding(BindingKey::Make(R, k));
}

//===----------------------------------------------------------------------===//
// Fine-grained control of RegionStoreManager.
//===----------------------------------------------------------------------===//

namespace {
struct minimal_features_tag {};
struct maximal_features_tag {};

class RegionStoreFeatures {
  bool SupportsFields;
public:
  RegionStoreFeatures(minimal_features_tag) :
    SupportsFields(false) {}

  RegionStoreFeatures(maximal_features_tag) :
    SupportsFields(true) {}

  void enableFields(bool t) { SupportsFields = t; }

  bool supportsFields() const { return SupportsFields; }
};
}

//===----------------------------------------------------------------------===//
// Main RegionStore logic.
//===----------------------------------------------------------------------===//

namespace {
class invalidateRegionsWorker;

class RegionStoreManager : public StoreManager {
public:
  const RegionStoreFeatures Features;

  RegionBindings::Factory RBFactory;
  mutable ClusterBindings::Factory CBFactory;

  typedef std::vector<SVal> SValListTy;
private:
  typedef llvm::DenseMap<const LazyCompoundValData *,
                         SValListTy> LazyBindingsMapTy;
  LazyBindingsMapTy LazyBindingsMap;

  /// The largest number of fields a struct can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
  ///
  /// This is controlled by the 'region-store-small-struct-limit' option.
  /// To disable all small-struct-dependent behavior, set the option to "0".
  unsigned SmallStructLimit;

  /// \brief A helper used to populate the work list with the given set of
  /// regions.
  void populateWorkList(invalidateRegionsWorker &W,
                        ArrayRef<SVal> Values,
                        InvalidatedRegions *TopLevelRegions);

public:
  RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f)
      : StoreManager(mgr), Features(f),
        RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()),
        SmallStructLimit(0) {
    if (SubEngine *Eng = StateMgr.getOwningEngine()) {
      AnalyzerOptions &Options = Eng->getAnalysisManager().options;
      SmallStructLimit =
          Options.getOptionAsInteger("region-store-small-struct-limit", 2);
    }
  }


  /// setImplicitDefaultValue - Set the default binding for the provided
  /// MemRegion to the value implicitly defined for compound literals when
  /// the value is not specified.
  RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B,
                                            const MemRegion *R, QualType T);

  /// ArrayToPointer - Emulates the "decay" of an array to a pointer
  /// type. 'Array' represents the lvalue of the array being decayed
  /// to a pointer, and the returned SVal represents the decayed
  /// version of that lvalue (i.e., a pointer to the first element of
  /// the array). This is called by ExprEngine when evaluating
  /// casts from arrays to pointers.
  SVal ArrayToPointer(Loc Array, QualType ElementTy) override;

  StoreRef getInitialStore(const LocationContext *InitLoc) override {
    return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this);
  }

  //===-------------------------------------------------------------------===//
  // Binding values to regions.
  //===-------------------------------------------------------------------===//
  RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated);

  StoreRef invalidateRegions(Store store,
                             ArrayRef<SVal> Values,
                             const Expr *E, unsigned Count,
                             const LocationContext *LCtx,
                             const CallEvent *Call,
                             InvalidatedSymbols &IS,
                             RegionAndSymbolInvalidationTraits &ITraits,
                             InvalidatedRegions *Invalidated,
                             InvalidatedRegions *InvalidatedTopLevel) override;

  bool scanReachableSymbols(Store S, const MemRegion *R,
                            ScanReachableSymbols &Callbacks) override;

  RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *R);

public: // Part of public interface to class.

  StoreRef Bind(Store store, Loc LV, SVal V) override {
    return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this);
  }

  RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V);

  // BindDefault is only used to initialize a region with a default value.
  StoreRef BindDefault(Store store, const MemRegion *R, SVal V) override {
    // FIXME: The offsets of empty bases can be tricky because of
    // the so-called "empty base class optimization".
    // If a base class has been optimized out
    // we should not try to create a binding, otherwise we should.
    // Unfortunately, at the moment ASTRecordLayout doesn't expose
    // the actual sizes of the empty bases
    // and trying to infer them from offsets/alignments
    // seems to be error-prone and non-trivial because of the trailing padding.
    // As a temporary mitigation we don't create bindings for empty bases.
    if (R->getKind() == MemRegion::CXXBaseObjectRegionKind &&
        cast<CXXBaseObjectRegion>(R)->getDecl()->isEmpty())
      return StoreRef(store, *this);

    RegionBindingsRef B = getRegionBindings(store);
    assert(!B.lookup(R, BindingKey::Direct));

    BindingKey Key = BindingKey::Make(R, BindingKey::Default);
    if (B.lookup(Key)) {
      const SubRegion *SR = cast<SubRegion>(R);
      assert(SR->getAsOffset().getOffset() ==
             SR->getSuperRegion()->getAsOffset().getOffset() &&
             "A default value must come from a super-region");
      B = removeSubRegionBindings(B, SR);
    } else {
      B = B.addBinding(Key, V);
    }

    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  /// Attempt to extract the fields of \p LCV and bind them to the struct
  /// region \p R.
  ///
  /// This path is used when it seems advantageous to "force" loading the
  /// values within a LazyCompoundVal to bind memberwise to the struct region,
  /// rather than using a Default binding at the base of the entire region.
  /// This is a heuristic attempting to avoid building long chains of
  /// LazyCompoundVals.
  ///
  /// \returns The updated store bindings, or \c None if binding non-lazily
  /// would be too expensive.
  Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B,
                                                 const TypedValueRegion *R,
                                                 const RecordDecl *RD,
                                                 nonloc::LazyCompoundVal LCV);

  /// BindStruct - Bind a compound value to a structure.
  RegionBindingsRef bindStruct(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  /// BindVector - Bind a compound value to a vector.
  RegionBindingsRef bindVector(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  RegionBindingsRef bindArray(RegionBindingsConstRef B,
                              const TypedValueRegion* R,
                              SVal V);

  /// Clears out all bindings in the given region and assigns a new value
  /// as a Default binding.
  RegionBindingsRef bindAggregate(RegionBindingsConstRef B,
                                  const TypedRegion *R,
                                  SVal DefaultVal);

  /// \brief Create a new store with the specified binding removed.
  /// \param ST the original store, that is the basis for the new store.
  /// \param L the location whose binding should be removed.
  StoreRef killBinding(Store ST, Loc L) override;

  void incrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRetain();
  }

  /// If the StoreManager supports it, decrement the reference count of
  /// the specified Store object. If the reference count hits 0, the memory
  /// associated with the object is recycled.
  void decrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRelease();
  }

  bool includedInBindings(Store store, const MemRegion *region) const override;

  /// \brief Return the value bound to specified location in a given state.
  ///
  /// The high level logic for this method is this:
  /// getBinding (L)
  ///   if L has binding
  ///     return L's binding
  ///   else if L is in killset
  ///     return unknown
  ///   else
  ///     if L is on stack or heap
  ///       return undefined
  ///     else
  ///       return symbolic
  SVal getBinding(Store S, Loc L, QualType T) override {
    return getBinding(getRegionBindings(S), L, T);
  }

  Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
    RegionBindingsRef B = getRegionBindings(S);
    // Default bindings are always applied over a base region so look up the
    // base region's default binding, otherwise the lookup will fail when R
    // is at an offset from R->getBaseRegion().
    return B.getDefaultBinding(R->getBaseRegion());
  }

  SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());

  SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);

  SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);

  SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);

  SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);

  SVal getBindingForLazySymbol(const TypedValueRegion *R);

  SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                         const TypedValueRegion *R,
                                         QualType Ty);

  SVal getLazyBinding(const SubRegion *LazyBindingRegion,
                      RegionBindingsRef LazyBinding);

  /// Get bindings for the values in a struct and return a CompoundVal, used
  /// when doing struct copy:
  /// struct s x, y;
  /// x = y;
  /// y's value is retrieved by this method.
  SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
  SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
  NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);

  /// Used to lazily generate derived symbols for bindings that are defined
  /// implicitly by default bindings in a super region.
  ///
  /// Note that callers may need to specially handle LazyCompoundVals, which
  /// are returned as is in case the caller needs to treat them differently.
  Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                  const MemRegion *superR,
                                                  const TypedValueRegion *R,
                                                  QualType Ty);

  /// Get the state and region whose binding this region \p R corresponds to.
  ///
  /// If there is no lazy binding for \p R, the returned value will have a null
  /// \c second. Note that a null pointer can represent a valid Store.
  std::pair<Store, const SubRegion *>
  findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
                  const SubRegion *originalRegion);

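  // Rough picture of how lazy bindings behave (an illustration, not part of
  // the original doc comments): for a struct copy such as
  //   struct S x, y;  ...  x = y;
  // the store does not copy y's fields one by one.  Instead x receives a
  // Default binding holding a LazyCompoundVal, i.e. a (Store, region of y)
  // snapshot.  A later read of x.f then goes through findLazyBinding() to
  // locate that snapshot and reads y.f out of the snapshotted store.
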
  /// Returns the cached set of interesting SVals contained within a lazy
  /// binding.
  ///
  /// The precise value of "interesting" is determined for the purposes of
  /// RegionStore's internal analysis. It must always contain all regions and
  /// symbols, but may omit constants and other kinds of SVal.
  const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);

  //===------------------------------------------------------------------===//
  // State pruning.
  //===------------------------------------------------------------------===//

  /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
  /// It returns a new Store with these values removed.
  StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
                              SymbolReaper& SymReaper) override;

  //===------------------------------------------------------------------===//
  // Region "extents".
  //===------------------------------------------------------------------===//

  // FIXME: This method will soon be eliminated; see the note in Store.h.
  DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state,
                                         const MemRegion* R,
                                         QualType EleTy) override;

  //===------------------------------------------------------------------===//
  // Utility methods.
  //===------------------------------------------------------------------===//

  RegionBindingsRef getRegionBindings(Store store) const {
    return RegionBindingsRef(CBFactory,
                             static_cast<const RegionBindings::TreeTy*>(store),
                             RBFactory.getTreeFactory());
  }

  void print(Store store, raw_ostream &Out, const char* nl,
             const char *sep) override;

  void iterBindings(Store store, BindingsHandler& f) override {
    RegionBindingsRef B = getRegionBindings(store);
    for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        const BindingKey &K = CI.getKey();
        if (!K.isDirect())
          continue;
        if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) {
          // FIXME: Possibly incorporate the offset?
          if (!f.HandleBinding(*this, store, R, CI.getData()))
            return;
        }
      }
    }
  }
};

} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RegionStore creation.
//===----------------------------------------------------------------------===//

std::unique_ptr<StoreManager>
ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
  RegionStoreFeatures F = maximal_features_tag();
  return llvm::make_unique<RegionStoreManager>(StMgr, F);
}

std::unique_ptr<StoreManager>
ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) {
  RegionStoreFeatures F = minimal_features_tag();
  F.enableFields(true);
  return llvm::make_unique<RegionStoreManager>(StMgr, F);
}


//===----------------------------------------------------------------------===//
// Region Cluster analysis.
//===----------------------------------------------------------------------===//

namespace {
/// Used to determine which global regions are automatically included in the
/// initial worklist of a ClusterAnalysis.
enum GlobalsFilterKind {
  /// Don't include any global regions.
  GFK_None,
  /// Only include system globals.
  GFK_SystemOnly,
  /// Include all global regions.
  GFK_All
};

template <typename DERIVED>
class ClusterAnalysis {
protected:
  typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
  typedef const MemRegion * WorkListElement;
  typedef SmallVector<WorkListElement, 10> WorkList;

  llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;

  WorkList WL;

  RegionStoreManager &RM;
  ASTContext &Ctx;
  SValBuilder &svalBuilder;

  RegionBindingsRef B;


protected:
  const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(R);
  }

  /// Returns true if all clusters in the given memspace should be initially
  /// included in the cluster analysis. Subclasses may provide their
  /// own implementation.
  bool includeEntireMemorySpace(const MemRegion *Base) {
    return false;
  }

public:
  ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
                  RegionBindingsRef b)
      : RM(rm), Ctx(StateMgr.getContext()),
        svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}

  RegionBindingsRef getRegionBindings() const { return B; }

  bool isVisited(const MemRegion *R) {
    return Visited.count(getCluster(R));
  }

  void GenerateClusters() {
    // Scan the entire set of bindings and record the region clusters.
    for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
         RI != RE; ++RI){
      const MemRegion *Base = RI.getKey();

      const ClusterBindings &Cluster = RI.getData();
      assert(!Cluster.isEmpty() && "Empty clusters should be removed");
      static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);

      // If the base's memspace should be entirely invalidated, add the
      // cluster to the work list up front.
      if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
        AddToWorkList(WorkListElement(Base), &Cluster);
    }
  }

  bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(C).second)
      return false;
    WL.push_back(E);
    return true;
  }

  bool AddToWorkList(const MemRegion *R) {
    return static_cast<DERIVED*>(this)->AddToWorkList(R);
  }

  void RunWorkList() {
    while (!WL.empty()) {
      WorkListElement E = WL.pop_back_val();
      const MemRegion *BaseR = E;

      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR));
    }
  }

  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}

  void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
                    bool Flag) {
    static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
  }
};
}
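
// How a ClusterAnalysis subclass is typically driven (a sketch based on the
// users later in this file, e.g. invalidateRegionsWorker; not a separate API):
//
//   invalidateRegionsWorker W(*this, StateMgr, B, ...);
//   W.GenerateClusters();                          // visit each cluster once
//   populateWorkList(W, Values, TopLevelRegions);  // seed the work list
//   W.RunWorkList();                       // VisitCluster() until it drains
//   B = W.getRegionBindings();             // pick up the updated bindings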

//===----------------------------------------------------------------------===//
// Binding invalidation.
//===----------------------------------------------------------------------===//

bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
                                              ScanReachableSymbols &Callbacks) {
  assert(R == R->getBaseRegion() && "Should only be called for base regions");
  RegionBindingsRef B = getRegionBindings(S);
  const ClusterBindings *Cluster = B.lookup(R);

  if (!Cluster)
    return true;

  for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
       RI != RE; ++RI) {
    if (!Callbacks.scan(RI.getData()))
      return false;
  }

  return true;
}

static inline bool isUnionField(const FieldRegion *FR) {
  return FR->getDecl()->getParent()->isUnion();
}

typedef SmallVector<const FieldDecl *, 8> FieldVector;

static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  const MemRegion *Base = K.getConcreteOffsetRegion();
  const MemRegion *R = K.getRegion();

  while (R != Base) {
    if (const FieldRegion *FR = dyn_cast<FieldRegion>(R))
      if (!isUnionField(FR))
        Fields.push_back(FR->getDecl());

    R = cast<SubRegion>(R)->getSuperRegion();
  }
}

static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  if (Fields.empty())
    return true;

  FieldVector FieldsInBindingKey;
  getSymbolicOffsetFields(K, FieldsInBindingKey);

  ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
  if (Delta >= 0)
    return std::equal(FieldsInBindingKey.begin() + Delta,
                      FieldsInBindingKey.end(),
                      Fields.begin());
  else
    return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(),
                      Fields.begin() - Delta);
}

/// Collects all bindings in \p Cluster that may refer to bindings within
/// \p Top.
///
/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
/// \c second is the value (an SVal).
///
/// The \p IncludeAllDefaultBindings parameter specifies whether to include
/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
/// an aggregate within a larger aggregate with a default binding.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, BindingKey TopKey,
                         bool IncludeAllDefaultBindings) {
  FieldVector FieldsInSymbolicSubregions;
  if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions);
    Top = cast<SubRegion>(TopKey.getConcreteOffsetRegion());
    TopKey = BindingKey::Make(Top, BindingKey::Default);
  }

  // Find the length (in bits) of the region being invalidated.
  uint64_t Length = UINT64_MAX;
  SVal Extent = Top->getExtent(SVB);
  if (Optional<nonloc::ConcreteInt> ExtentCI =
          Extent.getAs<nonloc::ConcreteInt>()) {
    const llvm::APSInt &ExtentInt = ExtentCI->getValue();
    assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
    // Extents are in bytes but region offsets are in bits. Be careful!
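    // For example (assuming the usual 8-bit char width), a region whose
    // extent is 4 bytes yields Length = 4 * 8 = 32 bits, which is then
    // compared against the bit offsets stored in the binding keys below.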
    Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) {
    if (FR->getDecl()->isBitField())
      Length = FR->getDecl()->getBitWidthValue(SVB.getContext());
  }

  for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end();
       I != E; ++I) {
    BindingKey NextKey = I.getKey();
    if (NextKey.getRegion() == TopKey.getRegion()) {
      // FIXME: This doesn't catch the case where we're really invalidating a
      // region with a symbolic offset. Example:
      //   R: points[i].y
      //   Next: points[0].x

      if (NextKey.getOffset() > TopKey.getOffset() &&
          NextKey.getOffset() - TopKey.getOffset() < Length) {
        // Case 1: The next binding is inside the region we're invalidating.
        // Include it.
        Bindings.push_back(*I);

      } else if (NextKey.getOffset() == TopKey.getOffset()) {
        // Case 2: The next binding is at the same offset as the region we're
        // invalidating. In this case, we need to leave default bindings
        // alone, since they may be providing a default value for a region
        // beyond what we're invalidating.
        // FIXME: This is probably incorrect; consider invalidating an outer
        // struct whose first field is bound to a LazyCompoundVal.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          Bindings.push_back(*I);
      }

    } else if (NextKey.hasSymbolicOffset()) {
      const MemRegion *Base = NextKey.getConcreteOffsetRegion();
      if (Top->isSubRegionOf(Base)) {
        // Case 3: The next key is symbolic and we just changed something
        // within its concrete region. We don't know if the binding is still
        // valid, so we'll be conservative and include it.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) {
        // Case 4: The next key is symbolic, but we changed a known
        // super-region. In this case the binding is certainly included.
        if (Top == Base || BaseSR->isSubRegionOf(Top))
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      }
    }
  }
}

static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, bool IncludeAllDefaultBindings) {
  collectSubRegionBindings(Bindings, SVB, Cluster, Top,
                           BindingKey::Make(Top, BindingKey::Default),
                           IncludeAllDefaultBindings);
}

RegionBindingsRef
RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *Top) {
  BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default);
  const MemRegion *ClusterHead = TopKey.getBaseRegion();

  if (Top == ClusterHead) {
    // We can remove an entire cluster's bindings all in one go.
    return B.remove(Top);
  }

  const ClusterBindings *Cluster = B.lookup(ClusterHead);
  if (!Cluster) {
    // If we're invalidating a region with a symbolic offset, we need to make
    // sure we don't treat the base region as uninitialized anymore.
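    // For example (illustrative): invalidating `a[i].y` when `a` has no
    // bindings yet. Binding a Default UnknownVal to the concrete part of the
    // key (`a` here) ensures that later reads of `a`'s elements come back as
    // unknown rather than undefined.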
    if (TopKey.hasSymbolicOffset()) {
      const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
      return B.addBinding(Concrete, BindingKey::Default, UnknownVal());
    }
    return B;
  }

  SmallVector<BindingPair, 32> Bindings;
  collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey,
                           /*IncludeAllDefaultBindings=*/false);

  ClusterBindingsRef Result(*Cluster, CBFactory);
  for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(),
                                                    E = Bindings.end();
       I != E; ++I)
    Result = Result.remove(I->first);

  // If we're invalidating a region with a symbolic offset, we need to make
  // sure we don't treat the base region as uninitialized anymore.
  // FIXME: This isn't very precise; see the example in
  // collectSubRegionBindings.
  if (TopKey.hasSymbolicOffset()) {
    const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
    Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default),
                        UnknownVal());
  }

  if (Result.isEmpty())
    return B.remove(ClusterHead);
  return B.add(ClusterHead, Result.asImmutableMap());
}

namespace {
class invalidateRegionsWorker : public ClusterAnalysis<invalidateRegionsWorker>
{
  const Expr *Ex;
  unsigned Count;
  const LocationContext *LCtx;
  InvalidatedSymbols &IS;
  RegionAndSymbolInvalidationTraits &ITraits;
  StoreManager::InvalidatedRegions *Regions;
  GlobalsFilterKind GlobalsFilter;
public:
  invalidateRegionsWorker(RegionStoreManager &rm,
                          ProgramStateManager &stateMgr,
                          RegionBindingsRef b,
                          const Expr *ex, unsigned count,
                          const LocationContext *lctx,
                          InvalidatedSymbols &is,
                          RegionAndSymbolInvalidationTraits &ITraitsIn,
                          StoreManager::InvalidatedRegions *r,
                          GlobalsFilterKind GFK)
      : ClusterAnalysis<invalidateRegionsWorker>(rm, stateMgr, b),
        Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn),
        Regions(r), GlobalsFilter(GFK) {}

  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  void VisitBinding(SVal V);

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  /// Returns true if all clusters in the memory space for \p Base should be
  /// invalidated.
  bool includeEntireMemorySpace(const MemRegion *Base);

  /// Returns true if the memory space of the given region is one of the
  /// global regions specially included at the start of invalidation.
  bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
};
}

bool invalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
  bool doNotInvalidateSuperRegion = ITraits.hasTrait(
      R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
  const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
  return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
}

void invalidateRegionsWorker::VisitBinding(SVal V) {
  // A symbol? Mark it touched by the invalidation.
  if (SymbolRef Sym = V.getAsSymbol())
    IS.insert(Sym);

  if (const MemRegion *R = V.getAsRegion()) {
    AddToWorkList(R);
    return;
  }

  // Is it a LazyCompoundVal? All references get invalidated as well.
  if (Optional<nonloc::LazyCompoundVal> LCS =
          V.getAs<nonloc::LazyCompoundVal>()) {

    const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS);

    for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(),
                                                        E = Vals.end();
         I != E; ++I)
      VisitBinding(*I);

    return;
  }
}

void invalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
                                           const ClusterBindings *C) {

  bool PreserveRegionsContents =
      ITraits.hasTrait(baseR,
                       RegionAndSymbolInvalidationTraits::TK_PreserveContents);

  if (C) {
    for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I)
      VisitBinding(I.getData());

    // Invalidate the region's contents.
    if (!PreserveRegionsContents)
      B = B.remove(baseR);
  }

  // BlockDataRegion? If so, invalidate captured variables that are passed
  // by reference.
  if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) {
    for (BlockDataRegion::referenced_vars_iterator
         BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ;
         BI != BE; ++BI) {
      const VarRegion *VR = BI.getCapturedRegion();
      const VarDecl *VD = VR->getDecl();
      if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
        AddToWorkList(VR);
      }
      else if (Loc::isLocType(VR->getValueType())) {
        // Map the current bindings to a Store to retrieve the value
        // of the binding. If that binding itself is a region, we should
        // invalidate that region. This is because a block may capture
        // a pointer value, but the thing pointed to by that pointer may
        // get invalidated.
        SVal V = RM.getBinding(B, loc::MemRegionVal(VR));
        if (Optional<Loc> L = V.getAs<Loc>()) {
          if (const MemRegion *LR = L->getAsRegion())
            AddToWorkList(LR);
        }
      }
    }
    return;
  }

  // Symbolic region?
  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR))
    IS.insert(SR->getSymbol());

  // Nothing else should be done if we are preserving the region's contents.
  if (PreserveRegionsContents)
    return;

  // Otherwise, we have a normal data region. Record that we touched the
  // region.
  if (Regions)
    Regions->push_back(baseR);

  if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) {
    // Invalidate the region by setting its default value to a
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (!baseR->isBoundable())
    return;

  const TypedValueRegion *TR = cast<TypedValueRegion>(baseR);
  QualType T = TR->getValueType();

  if (isInitiallyIncludedGlobalRegion(baseR)) {
    // If the region is a global and we are invalidating all globals,
    // erasing the entry is good enough. This causes all globals to be lazily
    // symbolicated from the same base symbol.
    return;
  }

  if (T->isStructureOrClassType()) {
    // Invalidate the region by setting its default value to a
    // conjured symbol. The type of the symbol is irrelevant.
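    // (Reads of individual fields will later derive fresh symbols from this
    // default binding via getBindingForDerivedDefaultValue().)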
    DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                          Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
    bool doNotInvalidateSuperRegion = ITraits.hasTrait(
        baseR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);

    if (doNotInvalidateSuperRegion) {
      // We are not doing a blank invalidation of the whole array region, so
      // we have to invalidate each element manually.
      Optional<uint64_t> NumElements;

      // Compute lower and upper offsets for region within array.
      if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT))
        NumElements = CAT->getSize().getZExtValue();
      if (!NumElements) // We are not dealing with a constant size array
        goto conjure_default;
      QualType ElementTy = AT->getElementType();
      uint64_t ElemSize = Ctx.getTypeSize(ElementTy);
      const RegionOffset &RO = baseR->getAsOffset();
      const MemRegion *SuperR = baseR->getBaseRegion();
      if (RO.hasSymbolicOffset()) {
        // If the base region has a symbolic offset,
        // we revert to invalidating the super region.
        if (SuperR)
          AddToWorkList(SuperR);
        goto conjure_default;
      }

      uint64_t LowerOffset = RO.getOffset();
      uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
      bool UpperOverflow = UpperOffset < LowerOffset;

      // Invalidate regions which are within array boundaries,
      // or have a symbolic offset.
      if (!SuperR)
        goto conjure_default;

      const ClusterBindings *C = B.lookup(SuperR);
      if (!C)
        goto conjure_default;

      for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E;
           ++I) {
        const BindingKey &BK = I.getKey();
        Optional<uint64_t> ROffset =
            BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset();

        // Remove the binding if its offset is symbolic, or if it is concrete
        // and falls within the array's boundaries. This handles arrays of
        // 0 elements and of 0-sized elements as well.
        if (!ROffset ||
            ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
             (UpperOverflow &&
              (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
             (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
          B = B.removeBinding(I.getKey());
          // Bound symbolic regions need to be invalidated for dead symbol
          // detection.
          SVal V = I.getData();
          const MemRegion *R = V.getAsRegion();
          if (R && isa<SymbolicRegion>(R))
            VisitBinding(V);
        }
      }
    }
  conjure_default:
    // Set the default value of the array to a conjured symbol.
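    // Elements whose bindings survived the loop above keep their values;
    // elements that were removed (or never bound) will have their values
    // derived from this conjured default binding on later reads.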
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                     AT->getElementType(), Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                        T, Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(baseR, BindingKey::Direct, V);
}

bool invalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
    const MemRegion *R) {
  switch (GlobalsFilter) {
  case GFK_None:
    return false;
  case GFK_SystemOnly:
    return isa<GlobalSystemSpaceRegion>(R->getMemorySpace());
  case GFK_All:
    return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace());
  }

  llvm_unreachable("unknown globals filter");
}

bool invalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
  if (isInitiallyIncludedGlobalRegion(Base))
    return true;

  const MemSpaceRegion *MemSpace = Base->getMemorySpace();
  return ITraits.hasTrait(MemSpace,
                          RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
}

RegionBindingsRef
RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated) {
  // Bind the globals memory space to a new symbol that we will use to derive
  // the bindings for all globals.
  const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
  SVal V =
      svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx,
                                   /* type does not matter */ Ctx.IntTy, Count);

  B = B.removeBinding(GS)
       .addBinding(BindingKey::Make(GS, BindingKey::Default), V);

  // Even if there are no bindings in the global scope, we still need to
  // record that we touched it.
  if (Invalidated)
    Invalidated->push_back(GS);

  return B;
}

void RegionStoreManager::populateWorkList(invalidateRegionsWorker &W,
                                          ArrayRef<SVal> Values,
                                          InvalidatedRegions *TopLevelRegions) {
  for (ArrayRef<SVal>::iterator I = Values.begin(),
                                E = Values.end(); I != E; ++I) {
    SVal V = *I;
    if (Optional<nonloc::LazyCompoundVal> LCS =
            V.getAs<nonloc::LazyCompoundVal>()) {

      const SValListTy &Vals = getInterestingValues(*LCS);

      for (SValListTy::const_iterator I = Vals.begin(),
                                      E = Vals.end(); I != E; ++I) {
        // Note: these are not recorded in TopLevelRegions because they are
        // not top-level regions.
        if (const MemRegion *R = (*I).getAsRegion())
          W.AddToWorkList(R);
      }
      continue;
    }

    if (const MemRegion *R = V.getAsRegion()) {
      if (TopLevelRegions)
        TopLevelRegions->push_back(R);
      W.AddToWorkList(R);
      continue;
    }
  }
}

StoreRef
RegionStoreManager::invalidateRegions(Store store,
                                      ArrayRef<SVal> Values,
                                      const Expr *Ex, unsigned Count,
                                      const LocationContext *LCtx,
                                      const CallEvent *Call,
                                      InvalidatedSymbols &IS,
                                      RegionAndSymbolInvalidationTraits &ITraits,
                                      InvalidatedRegions *TopLevelRegions,
                                      InvalidatedRegions *Invalidated) {
  GlobalsFilterKind GlobalsFilter;
  if (Call) {
    if (Call->isInSystemHeader())
      GlobalsFilter = GFK_SystemOnly;
    else
      GlobalsFilter = GFK_All;
  } else {
    GlobalsFilter = GFK_None;
  }

  RegionBindingsRef B = getRegionBindings(store);
  invalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits,
                            Invalidated, GlobalsFilter);

  // Scan the bindings and generate the clusters.
  W.GenerateClusters();

  // Add the regions to the worklist.
  populateWorkList(W, Values, TopLevelRegions);

  W.RunWorkList();

  // Return the new bindings.
  B = W.getRegionBindings();

  // For calls, determine which global regions should be invalidated and
  // invalidate them. (Note that function-static and immutable globals are
  // never invalidated by this.)
  // TODO: This could possibly be more precise with modules.
  switch (GlobalsFilter) {
  case GFK_All:
    B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    // FALLTHROUGH
  case GFK_SystemOnly:
    B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    // FALLTHROUGH
  case GFK_None:
    break;
  }

  return StoreRef(B.asStore(), *this);
}

//===----------------------------------------------------------------------===//
// Extents for regions.
//===----------------------------------------------------------------------===//

DefinedOrUnknownSVal
RegionStoreManager::getSizeInElements(ProgramStateRef state,
                                      const MemRegion *R,
                                      QualType EleTy) {
  SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder);
  const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size);
  if (!SizeInt)
    return UnknownVal();

  CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue());

  if (Ctx.getAsVariableArrayType(EleTy)) {
    // FIXME: We need to track extra state to properly record the size
    // of VLAs. Returning UnknownVal here, however, is a stop-gap so that
    // we don't have a divide-by-zero below.
    return UnknownVal();
  }

  CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy);

  // If a variable is reinterpreted as a type that doesn't fit into a larger
  // type evenly, round it down.
  // This is a signed value, since it's used in arithmetic with signed indices.
  return svalBuilder.makeIntVal(RegionSize / EleSize, false);
}
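
// Worked example (illustrative): if the region's extent is known to be 16
// bytes and EleTy is a 4-byte int, the result is 4 elements. If the extent
// were 10 bytes instead, the CharUnits division truncates and the result is
// 2, matching the "round it down" comment above.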

//===----------------------------------------------------------------------===//
// Location and region casting.
//===----------------------------------------------------------------------===//

/// ArrayToPointer - Emulates the "decay" of an array to a pointer
/// type. 'Array' represents the lvalue of the array being decayed
/// to a pointer, and the returned SVal represents the decayed
/// version of that lvalue (i.e., a pointer to the first element of
/// the array). This is called by ExprEngine when evaluating casts
/// from arrays to pointers.
SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
  if (Array.getAs<loc::ConcreteInt>())
    return Array;

  if (!Array.getAs<loc::MemRegionVal>())
    return UnknownVal();

  const SubRegion *R =
      cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion());
  NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
  return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx));
}

//===----------------------------------------------------------------------===//
// Loading values from regions.
//===----------------------------------------------------------------------===//

SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L,
                                    QualType T) {
  assert(!L.getAs<UnknownVal>() && "location unknown");
  assert(!L.getAs<UndefinedVal>() && "location undefined");

  // For access to concrete addresses, return UnknownVal. Checks
  // for null dereferences (and similar errors) are done by checkers, not
  // the Store.
  // FIXME: We can consider lazily symbolicating such memory, but we really
  // should defer this until we can reason easily about symbolicating arrays
  // of bytes.
  if (L.getAs<loc::ConcreteInt>()) {
    return UnknownVal();
  }
  if (!L.getAs<loc::MemRegionVal>()) {
    return UnknownVal();
  }

  const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();

  if (isa<BlockDataRegion>(MR)) {
    return UnknownVal();
  }

  if (isa<AllocaRegion>(MR) ||
      isa<SymbolicRegion>(MR) ||
      isa<CodeTextRegion>(MR)) {
    if (T.isNull()) {
      if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR))
        T = TR->getLocationType();
      else {
        const SymbolicRegion *SR = cast<SymbolicRegion>(MR);
        T = SR->getSymbol()->getType();
      }
    }
    MR = GetElementZeroRegion(cast<SubRegion>(MR), T);
  }

  // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
  // instead of 'Loc', and have the other Loc cases handled at a higher level.
  const TypedValueRegion *R = cast<TypedValueRegion>(MR);
  QualType RTy = R->getValueType();

  // FIXME: we do not yet model the parts of a complex type, so treat the
  // whole thing as "unknown".
  if (RTy->isAnyComplexType())
    return UnknownVal();

  // FIXME: We should eventually handle funny addressing. e.g.:
  //
  //  int x = ...;
  //  int *p = &x;
  //  char *q = (char*) p;
  //  char c = *q;  // returns the first byte of 'x'.
  //
  // Such funny addressing will occur due to layering of regions.
  if (RTy->isStructureOrClassType())
    return getBindingForStruct(B, R);

  // FIXME: Handle unions.
  if (RTy->isUnionType())
    return createLazyBinding(B, R);

  if (RTy->isArrayType()) {
    if (RTy->isConstantArrayType())
      return getBindingForArray(B, R);
    else
      return UnknownVal();
  }

  // FIXME: handle Vector types.
  if (RTy->isVectorType())
    return UnknownVal();

  if (const FieldRegion* FR = dyn_cast<FieldRegion>(R))
    return CastRetrievedVal(getBindingForField(B, FR), FR, T, false);

  if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the element type. Eventually we want to compose these values
    // more intelligently. For example, an 'element' can encompass multiple
    // bound regions (e.g., several bound bytes), or could be a subset of
    // a larger value.
    return CastRetrievedVal(getBindingForElement(B, ER), ER, T, false);
  }

  if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the ivar type. What we should model is stores to ivars
    // that blow past the extent of the ivar. If the address of the ivar is
    // reinterpreted, it is possible we stored a different value that could
    // fit within the ivar. Either we need to cast these when storing them
    // or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T, false);
  }

  if (const VarRegion *VR = dyn_cast<VarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the variable type. What we should model is stores to variables
    // that blow past the extent of the variable. If the address of the
    // variable is reinterpreted, it is possible we stored a different value
    // that could fit within the variable. Either we need to cast these when
    // storing them or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForVar(B, VR), VR, T, false);
  }

  const SVal *V = B.lookup(R, BindingKey::Direct);

  // Check if the region has a binding.
  if (V)
    return *V;

  // The location does not have a bound value. This means that it has
  // the value it had upon its creation and/or entry to the analyzed
  // function/method. These are either symbolic values or 'undefined'.
  if (R->hasStackNonParametersStorage()) {
    // All stack variables are considered to have undefined values
    // upon creation. All heap allocated blocks are considered to
    // have undefined values as well unless they are explicitly bound
    // to specific values.
    return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}

static QualType getUnderlyingType(const SubRegion *R) {
  QualType RegionTy;
  if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R))
    RegionTy = TVR->getValueType();

  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R))
    RegionTy = SR->getSymbol()->getType();

  return RegionTy;
}

/// Checks to see if store \p B has a lazy binding for region \p R.
///
/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
/// if there are additional bindings within \p R.
///
/// Note that unlike RegionStoreManager::findLazyBinding, this will not search
/// for lazy bindings for super-regions of \p R.
static Optional<nonloc::LazyCompoundVal>
getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
                       const SubRegion *R, bool AllowSubregionBindings) {
  Optional<SVal> V = B.getDefaultBinding(R);
  if (!V)
    return None;

  Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>();
  if (!LCV)
    return None;

  // If the LCV is for a subregion, the types might not match, and we shouldn't
  // reuse the binding.
  QualType RegionTy = getUnderlyingType(R);
  if (!RegionTy.isNull() &&
      !RegionTy->isVoidPointerType()) {
    QualType SourceRegionTy = LCV->getRegion()->getValueType();
    if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy))
      return None;
  }

  if (!AllowSubregionBindings) {
    // If there are any other bindings within this region, we shouldn't reuse
    // the top-level binding.
    SmallVector<BindingPair, 16> Bindings;
    collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R,
                             /*IncludeAllDefaultBindings=*/true);
    if (Bindings.size() > 1)
      return None;
  }

  return *LCV;
}


std::pair<Store, const SubRegion *>
RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
                                    const SubRegion *R,
                                    const SubRegion *originalRegion) {
  if (originalRegion != R) {
    if (Optional<nonloc::LazyCompoundVal> V =
            getExistingLazyBinding(svalBuilder, B, R, true))
      return std::make_pair(V->getStore(), V->getRegion());
  }

  typedef std::pair<Store, const SubRegion *> StoreRegionPair;
  StoreRegionPair Result = StoreRegionPair();

  if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
    Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second);

  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
    Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second);

  } else if (const CXXBaseObjectRegion *BaseReg =
                 dyn_cast<CXXBaseObjectRegion>(R)) {
    // C++ base object region is another kind of region that we should blast
    // through to look for lazy compound value. It is like a field region.
    Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()),
                             originalRegion);

    if (Result.second)
      Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg,
                                                            Result.second);
  }

  return Result;
}

SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
                                              const ElementRegion* R) {
  // We do not currently model bindings of the CompoundLiteralRegion.
  if (isa<CompoundLiteralRegion>(R->getBaseRegion()))
    return UnknownVal();

  // Check if the region has a binding.
  if (const Optional<SVal> &V = B.getDirectBinding(R))
    return *V;

  const MemRegion* superR = R->getSuperRegion();

  // Check if the region is an element region of a string literal.
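  // For example (illustrative): given `const char *p = "abc";`, a load of
  // p[1] should produce 'b', p[3] the terminating '\0', and a negative index
  // is treated as undefined by the code below.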
1607   if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) {
1608     // FIXME: Handle loads from strings where the literal is treated as
1609     // an integer, e.g., *((unsigned int*)"hello")
1610     QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType();
1611     if (!Ctx.hasSameUnqualifiedType(T, R->getElementType()))
1612       return UnknownVal();
1613
1614     const StringLiteral *Str = StrR->getStringLiteral();
1615     SVal Idx = R->getIndex();
1616     if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) {
1617       int64_t i = CI->getValue().getSExtValue();
1618       // Abort on string underrun (a negative index). This can happen with
1619       // arbitrary clients of getBindingForElement().
1620       if (i < 0)
1621         return UndefinedVal();
1622       int64_t length = Str->getLength();
1623       // Technically, only i == length is guaranteed to be null.
1624       // However, such overflows should be caught before reaching this point;
1625       // the only time such an access would be made is if a string literal was
1626       // used to initialize a larger array.
1627       char c = (i >= length) ? '\0' : Str->getCodeUnit(i);
1628       return svalBuilder.makeIntVal(c, T);
1629     }
1630   }
1631
1632   // Check for loads from a code text region. For such loads, just give up.
1633   if (isa<CodeTextRegion>(superR))
1634     return UnknownVal();
1635
1636   // Handle the case where we are indexing into a larger scalar object.
1637   // For example, this handles:
1638   //   int x = ...
1639   //   char *y = &x;
1640   //   return *y;
1641   // FIXME: This is a hack, and doesn't do anything really intelligent yet.
1642   const RegionRawOffset &O = R->getAsArrayOffset();
1643
1644   // If we cannot reason about the offset, return an unknown value.
1645   if (!O.getRegion())
1646     return UnknownVal();
1647
1648   if (const TypedValueRegion *baseR =
1649         dyn_cast_or_null<TypedValueRegion>(O.getRegion())) {
1650     QualType baseT = baseR->getValueType();
1651     if (baseT->isScalarType()) {
1652       QualType elemT = R->getElementType();
1653       if (elemT->isScalarType()) {
1654         if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) {
1655           if (const Optional<SVal> &V = B.getDirectBinding(superR)) {
1656             if (SymbolRef parentSym = V->getAsSymbol())
1657               return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R);
1658
1659             if (V->isUnknownOrUndef())
1660               return *V;
1661             // Other cases: give up. We are indexing into a larger object
1662             // that has some value, but we don't know how to handle that yet.
1663             return UnknownVal();
1664           }
1665         }
1666       }
1667     }
1668   }
1669   return getBindingForFieldOrElementCommon(B, R, R->getElementType());
1670 }
1671
1672 SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B,
1673                                             const FieldRegion* R) {
1674
1675   // Check if the region has a binding.
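  // For instance (illustrative only; 's' and 'count' are hypothetical), after
  // the analyzer has processed
  //   s.count = 42;
  // the FieldRegion for 'count' carries a direct binding, and the lookup below
  // returns that value without consulting any default bindings.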
1676 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1677 return *V; 1678 1679 QualType Ty = R->getValueType(); 1680 return getBindingForFieldOrElementCommon(B, R, Ty); 1681 } 1682 1683 Optional<SVal> 1684 RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 1685 const MemRegion *superR, 1686 const TypedValueRegion *R, 1687 QualType Ty) { 1688 1689 if (const Optional<SVal> &D = B.getDefaultBinding(superR)) { 1690 const SVal &val = D.getValue(); 1691 if (SymbolRef parentSym = val.getAsSymbol()) 1692 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1693 1694 if (val.isZeroConstant()) 1695 return svalBuilder.makeZeroVal(Ty); 1696 1697 if (val.isUnknownOrUndef()) 1698 return val; 1699 1700 // Lazy bindings are usually handled through getExistingLazyBinding(). 1701 // We should unify these two code paths at some point. 1702 if (val.getAs<nonloc::LazyCompoundVal>() || 1703 val.getAs<nonloc::CompoundVal>()) 1704 return val; 1705 1706 llvm_unreachable("Unknown default value"); 1707 } 1708 1709 return None; 1710 } 1711 1712 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 1713 RegionBindingsRef LazyBinding) { 1714 SVal Result; 1715 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 1716 Result = getBindingForElement(LazyBinding, ER); 1717 else 1718 Result = getBindingForField(LazyBinding, 1719 cast<FieldRegion>(LazyBindingRegion)); 1720 1721 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1722 // default value for /part/ of an aggregate from a default value for the 1723 // /entire/ aggregate. The most common case of this is when struct Outer 1724 // has as its first member a struct Inner, which is copied in from a stack 1725 // variable. In this case, even if the Outer's default value is symbolic, 0, 1726 // or unknown, it gets overridden by the Inner's default value of undefined. 1727 // 1728 // This is a general problem -- if the Inner is zero-initialized, the Outer 1729 // will now look zero-initialized. The proper way to solve this is with a 1730 // new version of RegionStore that tracks the extent of a binding as well 1731 // as the offset. 1732 // 1733 // This hack only takes care of the undefined case because that can very 1734 // quickly result in a warning. 1735 if (Result.isUndef()) 1736 Result = UnknownVal(); 1737 1738 return Result; 1739 } 1740 1741 SVal 1742 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 1743 const TypedValueRegion *R, 1744 QualType Ty) { 1745 1746 // At this point we have already checked in either getBindingForElement or 1747 // getBindingForField if 'R' has a direct binding. 1748 1749 // Lazy binding? 1750 Store lazyBindingStore = nullptr; 1751 const SubRegion *lazyBindingRegion = nullptr; 1752 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 1753 if (lazyBindingRegion) 1754 return getLazyBinding(lazyBindingRegion, 1755 getRegionBindings(lazyBindingStore)); 1756 1757 // Record whether or not we see a symbolic index. That can completely 1758 // be out of scope of our lookup. 1759 bool hasSymbolicIndex = false; 1760 1761 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1762 // default value for /part/ of an aggregate from a default value for the 1763 // /entire/ aggregate. The most common case of this is when struct Outer 1764 // has as its first member a struct Inner, which is copied in from a stack 1765 // variable. 
In this case, even if the Outer's default value is symbolic, 0, 1766 // or unknown, it gets overridden by the Inner's default value of undefined. 1767 // 1768 // This is a general problem -- if the Inner is zero-initialized, the Outer 1769 // will now look zero-initialized. The proper way to solve this is with a 1770 // new version of RegionStore that tracks the extent of a binding as well 1771 // as the offset. 1772 // 1773 // This hack only takes care of the undefined case because that can very 1774 // quickly result in a warning. 1775 bool hasPartialLazyBinding = false; 1776 1777 const SubRegion *SR = dyn_cast<SubRegion>(R); 1778 while (SR) { 1779 const MemRegion *Base = SR->getSuperRegion(); 1780 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 1781 if (D->getAs<nonloc::LazyCompoundVal>()) { 1782 hasPartialLazyBinding = true; 1783 break; 1784 } 1785 1786 return *D; 1787 } 1788 1789 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 1790 NonLoc index = ER->getIndex(); 1791 if (!index.isConstant()) 1792 hasSymbolicIndex = true; 1793 } 1794 1795 // If our super region is a field or element itself, walk up the region 1796 // hierarchy to see if there is a default value installed in an ancestor. 1797 SR = dyn_cast<SubRegion>(Base); 1798 } 1799 1800 if (R->hasStackNonParametersStorage()) { 1801 if (isa<ElementRegion>(R)) { 1802 // Currently we don't reason specially about Clang-style vectors. Check 1803 // if superR is a vector and if so return Unknown. 1804 if (const TypedValueRegion *typedSuperR = 1805 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 1806 if (typedSuperR->getValueType()->isVectorType()) 1807 return UnknownVal(); 1808 } 1809 } 1810 1811 // FIXME: We also need to take ElementRegions with symbolic indexes into 1812 // account. This case handles both directly accessing an ElementRegion 1813 // with a symbolic offset, but also fields within an element with 1814 // a symbolic offset. 1815 if (hasSymbolicIndex) 1816 return UnknownVal(); 1817 1818 if (!hasPartialLazyBinding) 1819 return UndefinedVal(); 1820 } 1821 1822 // All other values are symbolic. 1823 return svalBuilder.getRegionValueSymbolVal(R); 1824 } 1825 1826 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 1827 const ObjCIvarRegion* R) { 1828 // Check if the region has a binding. 1829 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1830 return *V; 1831 1832 const MemRegion *superR = R->getSuperRegion(); 1833 1834 // Check if the super region has a default binding. 1835 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 1836 if (SymbolRef parentSym = V->getAsSymbol()) 1837 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1838 1839 // Other cases: give up. 1840 return UnknownVal(); 1841 } 1842 1843 return getBindingForLazySymbol(R); 1844 } 1845 1846 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 1847 const VarRegion *R) { 1848 1849 // Check if the region has a binding. 1850 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1851 return *V; 1852 1853 // Lazily derive a value for the VarRegion. 1854 const VarDecl *VD = R->getDecl(); 1855 const MemSpaceRegion *MS = R->getMemorySpace(); 1856 1857 // Arguments are always symbolic. 1858 if (isa<StackArgumentsSpaceRegion>(MS)) 1859 return svalBuilder.getRegionValueSymbolVal(R); 1860 1861 // Is 'VD' declared constant? If so, retrieve the constant value. 
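  // For example (illustrative only; 'limit' is hypothetical), given
  //   const int limit = 64;
  // a read of 'limit' can fall back to the initializer's value (64) even when
  // the store carries no explicit binding for the variable.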
1862 if (VD->getType().isConstQualified()) 1863 if (const Expr *Init = VD->getInit()) 1864 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1865 return *V; 1866 1867 // This must come after the check for constants because closure-captured 1868 // constant variables may appear in UnknownSpaceRegion. 1869 if (isa<UnknownSpaceRegion>(MS)) 1870 return svalBuilder.getRegionValueSymbolVal(R); 1871 1872 if (isa<GlobalsSpaceRegion>(MS)) { 1873 QualType T = VD->getType(); 1874 1875 // Function-scoped static variables are default-initialized to 0; if they 1876 // have an initializer, it would have been processed by now. 1877 // FIXME: This is only true when we're starting analysis from main(). 1878 // We're losing a lot of coverage here. 1879 if (isa<StaticGlobalSpaceRegion>(MS)) 1880 return svalBuilder.makeZeroVal(T); 1881 1882 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 1883 assert(!V->getAs<nonloc::LazyCompoundVal>()); 1884 return V.getValue(); 1885 } 1886 1887 return svalBuilder.getRegionValueSymbolVal(R); 1888 } 1889 1890 return UndefinedVal(); 1891 } 1892 1893 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 1894 // All other values are symbolic. 1895 return svalBuilder.getRegionValueSymbolVal(R); 1896 } 1897 1898 const RegionStoreManager::SValListTy & 1899 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 1900 // First, check the cache. 1901 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 1902 if (I != LazyBindingsMap.end()) 1903 return I->second; 1904 1905 // If we don't have a list of values cached, start constructing it. 1906 SValListTy List; 1907 1908 const SubRegion *LazyR = LCV.getRegion(); 1909 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 1910 1911 // If this region had /no/ bindings at the time, there are no interesting 1912 // values to return. 
1913 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 1914 if (!Cluster) 1915 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 1916 1917 SmallVector<BindingPair, 32> Bindings; 1918 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 1919 /*IncludeAllDefaultBindings=*/true); 1920 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 1921 E = Bindings.end(); 1922 I != E; ++I) { 1923 SVal V = I->second; 1924 if (V.isUnknownOrUndef() || V.isConstant()) 1925 continue; 1926 1927 if (Optional<nonloc::LazyCompoundVal> InnerLCV = 1928 V.getAs<nonloc::LazyCompoundVal>()) { 1929 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 1930 List.insert(List.end(), InnerList.begin(), InnerList.end()); 1931 continue; 1932 } 1933 1934 List.push_back(V); 1935 } 1936 1937 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 1938 } 1939 1940 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 1941 const TypedValueRegion *R) { 1942 if (Optional<nonloc::LazyCompoundVal> V = 1943 getExistingLazyBinding(svalBuilder, B, R, false)) 1944 return *V; 1945 1946 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 1947 } 1948 1949 static bool isRecordEmpty(const RecordDecl *RD) { 1950 if (!RD->field_empty()) 1951 return false; 1952 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 1953 return CRD->getNumBases() == 0; 1954 return true; 1955 } 1956 1957 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 1958 const TypedValueRegion *R) { 1959 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 1960 if (!RD->getDefinition() || isRecordEmpty(RD)) 1961 return UnknownVal(); 1962 1963 return createLazyBinding(B, R); 1964 } 1965 1966 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 1967 const TypedValueRegion *R) { 1968 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 1969 "Only constant array types can have compound bindings."); 1970 1971 return createLazyBinding(B, R); 1972 } 1973 1974 bool RegionStoreManager::includedInBindings(Store store, 1975 const MemRegion *region) const { 1976 RegionBindingsRef B = getRegionBindings(store); 1977 region = region->getBaseRegion(); 1978 1979 // Quick path: if the base is the head of a cluster, the region is live. 1980 if (B.lookup(region)) 1981 return true; 1982 1983 // Slow path: if the region is the VALUE of any binding, it is live. 1984 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 1985 const ClusterBindings &Cluster = RI.getData(); 1986 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 1987 CI != CE; ++CI) { 1988 const SVal &D = CI.getData(); 1989 if (const MemRegion *R = D.getAsRegion()) 1990 if (R->getBaseRegion() == region) 1991 return true; 1992 } 1993 } 1994 1995 return false; 1996 } 1997 1998 //===----------------------------------------------------------------------===// 1999 // Binding values to regions. 
2000 //===----------------------------------------------------------------------===// 2001 2002 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) { 2003 if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>()) 2004 if (const MemRegion* R = LV->getRegion()) 2005 return StoreRef(getRegionBindings(ST).removeBinding(R) 2006 .asImmutableMap() 2007 .getRootWithoutRetain(), 2008 *this); 2009 2010 return StoreRef(ST, *this); 2011 } 2012 2013 RegionBindingsRef 2014 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) { 2015 if (L.getAs<loc::ConcreteInt>()) 2016 return B; 2017 2018 // If we get here, the location should be a region. 2019 const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion(); 2020 2021 // Check if the region is a struct region. 2022 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) { 2023 QualType Ty = TR->getValueType(); 2024 if (Ty->isArrayType()) 2025 return bindArray(B, TR, V); 2026 if (Ty->isStructureOrClassType()) 2027 return bindStruct(B, TR, V); 2028 if (Ty->isVectorType()) 2029 return bindVector(B, TR, V); 2030 if (Ty->isUnionType()) 2031 return bindAggregate(B, TR, V); 2032 } 2033 2034 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) { 2035 // Binding directly to a symbolic region should be treated as binding 2036 // to element 0. 2037 QualType T = SR->getSymbol()->getType(); 2038 if (T->isAnyPointerType() || T->isReferenceType()) 2039 T = T->getPointeeType(); 2040 2041 R = GetElementZeroRegion(SR, T); 2042 } 2043 2044 // Clear out bindings that may overlap with this binding. 2045 RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R)); 2046 return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V); 2047 } 2048 2049 RegionBindingsRef 2050 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B, 2051 const MemRegion *R, 2052 QualType T) { 2053 SVal V; 2054 2055 if (Loc::isLocType(T)) 2056 V = svalBuilder.makeNull(); 2057 else if (T->isIntegralOrEnumerationType()) 2058 V = svalBuilder.makeZeroVal(T); 2059 else if (T->isStructureOrClassType() || T->isArrayType()) { 2060 // Set the default value to a zero constant when it is a structure 2061 // or array. The type doesn't really matter. 2062 V = svalBuilder.makeZeroVal(Ctx.IntTy); 2063 } 2064 else { 2065 // We can't represent values of this type, but we still need to set a value 2066 // to record that the region has been initialized. 2067 // If this assertion ever fires, a new case should be added above -- we 2068 // should know how to default-initialize any value we can symbolicate. 2069 assert(!SymbolManager::canSymbolicate(T) && "This type is representable"); 2070 V = UnknownVal(); 2071 } 2072 2073 return B.addBinding(R, BindingKey::Default, V); 2074 } 2075 2076 RegionBindingsRef 2077 RegionStoreManager::bindArray(RegionBindingsConstRef B, 2078 const TypedValueRegion* R, 2079 SVal Init) { 2080 2081 const ArrayType *AT =cast<ArrayType>(Ctx.getCanonicalType(R->getValueType())); 2082 QualType ElementTy = AT->getElementType(); 2083 Optional<uint64_t> Size; 2084 2085 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT)) 2086 Size = CAT->getSize().getZExtValue(); 2087 2088 // Check if the init expr is a string literal. 2089 if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) { 2090 const StringRegion *S = cast<StringRegion>(MRV->getRegion()); 2091 2092 // Treat the string as a lazy compound value. 
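    // For example (illustrative only; 'buf' is hypothetical), an
    // initialization such as
    //   char buf[8] = "hi";
    // reaches this point with 'Init' referring to the StringRegion for "hi";
    // the whole array is then bound to a LazyCompoundVal over that region
    // rather than element by element.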
2093     StoreRef store(B.asStore(), *this);
2094     nonloc::LazyCompoundVal LCV = svalBuilder.makeLazyCompoundVal(store, S)
2095                                       .castAs<nonloc::LazyCompoundVal>();
2096     return bindAggregate(B, R, LCV);
2097   }
2098
2099   // Handle lazy compound values.
2100   if (Init.getAs<nonloc::LazyCompoundVal>())
2101     return bindAggregate(B, R, Init);
2102
2103   if (Init.isUnknown())
2104     return bindAggregate(B, R, UnknownVal());
2105
2106   // Remaining case: explicit compound values.
2107   const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2108   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2109   uint64_t i = 0;
2110
2111   RegionBindingsRef NewB(B);
2112
2113   for (; Size.hasValue() ? i < Size.getValue() : true ; ++i, ++VI) {
2114     // The init list might be shorter than the array length.
2115     if (VI == VE)
2116       break;
2117
2118     const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
2119     const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);
2120
2121     if (ElementTy->isStructureOrClassType())
2122       NewB = bindStruct(NewB, ER, *VI);
2123     else if (ElementTy->isArrayType())
2124       NewB = bindArray(NewB, ER, *VI);
2125     else
2126       NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2127   }
2128
2129   // If the init list is shorter than the array length, set the
2130   // array default value.
2131   if (Size.hasValue() && i < Size.getValue())
2132     NewB = setImplicitDefaultValue(NewB, R, ElementTy);
2133
2134   return NewB;
2135 }
2136
2137 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
2138                                                  const TypedValueRegion* R,
2139                                                  SVal V) {
2140   QualType T = R->getValueType();
2141   assert(T->isVectorType());
2142   const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.
2143
2144   // Handle lazy compound values and symbolic values.
2145   if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
2146     return bindAggregate(B, R, V);
2147
2148   // We may get a non-CompoundVal accidentally due to imprecise cast logic or
2149   // because we are binding a symbolic struct value. Kill the field values, and
2150   // if the value is symbolic go and bind it as a "default" binding.
2151 if (!V.getAs<nonloc::CompoundVal>()) { 2152 return bindAggregate(B, R, UnknownVal()); 2153 } 2154 2155 QualType ElemType = VT->getElementType(); 2156 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>(); 2157 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2158 unsigned index = 0, numElements = VT->getNumElements(); 2159 RegionBindingsRef NewB(B); 2160 2161 for ( ; index != numElements ; ++index) { 2162 if (VI == VE) 2163 break; 2164 2165 NonLoc Idx = svalBuilder.makeArrayIndex(index); 2166 const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx); 2167 2168 if (ElemType->isArrayType()) 2169 NewB = bindArray(NewB, ER, *VI); 2170 else if (ElemType->isStructureOrClassType()) 2171 NewB = bindStruct(NewB, ER, *VI); 2172 else 2173 NewB = bind(NewB, loc::MemRegionVal(ER), *VI); 2174 } 2175 return NewB; 2176 } 2177 2178 Optional<RegionBindingsRef> 2179 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B, 2180 const TypedValueRegion *R, 2181 const RecordDecl *RD, 2182 nonloc::LazyCompoundVal LCV) { 2183 FieldVector Fields; 2184 2185 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD)) 2186 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0) 2187 return None; 2188 2189 for (const auto *FD : RD->fields()) { 2190 if (FD->isUnnamedBitfield()) 2191 continue; 2192 2193 // If there are too many fields, or if any of the fields are aggregates, 2194 // just use the LCV as a default binding. 2195 if (Fields.size() == SmallStructLimit) 2196 return None; 2197 2198 QualType Ty = FD->getType(); 2199 if (!(Ty->isScalarType() || Ty->isReferenceType())) 2200 return None; 2201 2202 Fields.push_back(FD); 2203 } 2204 2205 RegionBindingsRef NewB = B; 2206 2207 for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){ 2208 const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion()); 2209 SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR); 2210 2211 const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R); 2212 NewB = bind(NewB, loc::MemRegionVal(DestFR), V); 2213 } 2214 2215 return NewB; 2216 } 2217 2218 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B, 2219 const TypedValueRegion* R, 2220 SVal V) { 2221 if (!Features.supportsFields()) 2222 return B; 2223 2224 QualType T = R->getValueType(); 2225 assert(T->isStructureOrClassType()); 2226 2227 const RecordType* RT = T->getAs<RecordType>(); 2228 const RecordDecl *RD = RT->getDecl(); 2229 2230 if (!RD->isCompleteDefinition()) 2231 return B; 2232 2233 // Handle lazy compound values and symbolic values. 2234 if (Optional<nonloc::LazyCompoundVal> LCV = 2235 V.getAs<nonloc::LazyCompoundVal>()) { 2236 if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV)) 2237 return *NewB; 2238 return bindAggregate(B, R, V); 2239 } 2240 if (V.getAs<nonloc::SymbolVal>()) 2241 return bindAggregate(B, R, V); 2242 2243 // We may get non-CompoundVal accidentally due to imprecise cast logic or 2244 // that we are binding symbolic struct value. Kill the field values, and if 2245 // the value is symbolic go and bind it as a "default" binding. 
2246   if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
2247     return bindAggregate(B, R, UnknownVal());
2248
2249   const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2250   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2251
2252   RecordDecl::field_iterator FI, FE;
2253   RegionBindingsRef NewB(B);
2254
2255   for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2256
2257     if (VI == VE)
2258       break;
2259
2260     // Skip any unnamed bitfields to stay in sync with the initializers.
2261     if (FI->isUnnamedBitfield())
2262       continue;
2263
2264     QualType FTy = FI->getType();
2265     const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);
2266
2267     if (FTy->isArrayType())
2268       NewB = bindArray(NewB, FR, *VI);
2269     else if (FTy->isStructureOrClassType())
2270       NewB = bindStruct(NewB, FR, *VI);
2271     else
2272       NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
2273     ++VI;
2274   }
2275
2276   // The initializer list may have fewer values than the struct has fields.
2277   if (FI != FE) {
2278     NewB = NewB.addBinding(R, BindingKey::Default,
2279                            svalBuilder.makeIntVal(0, false));
2280   }
2281
2282   return NewB;
2283 }
2284
2285 RegionBindingsRef
2286 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2287                                   const TypedRegion *R,
2288                                   SVal Val) {
2289   // Remove the old bindings, using 'R' as the root of all regions
2290   // we will invalidate. Then add the new binding.
2291   return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2292 }
2293
2294 //===----------------------------------------------------------------------===//
2295 // State pruning.
2296 //===----------------------------------------------------------------------===//
2297
2298 namespace {
2299 class removeDeadBindingsWorker :
2300   public ClusterAnalysis<removeDeadBindingsWorker> {
2301   SmallVector<const SymbolicRegion*, 12> Postponed;
2302   SymbolReaper &SymReaper;
2303   const StackFrameContext *CurrentLCtx;
2304
2305 public:
2306   removeDeadBindingsWorker(RegionStoreManager &rm,
2307                            ProgramStateManager &stateMgr,
2308                            RegionBindingsRef b, SymbolReaper &symReaper,
2309                            const StackFrameContext *LCtx)
2310     : ClusterAnalysis<removeDeadBindingsWorker>(rm, stateMgr, b),
2311       SymReaper(symReaper), CurrentLCtx(LCtx) {}
2312
2313   // Called by ClusterAnalysis.
2314 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2315 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2316 using ClusterAnalysis<removeDeadBindingsWorker>::VisitCluster; 2317 2318 using ClusterAnalysis::AddToWorkList; 2319 2320 bool AddToWorkList(const MemRegion *R); 2321 2322 bool UpdatePostponed(); 2323 void VisitBinding(SVal V); 2324 }; 2325 } 2326 2327 bool removeDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2328 const MemRegion *BaseR = R->getBaseRegion(); 2329 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2330 } 2331 2332 void removeDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2333 const ClusterBindings &C) { 2334 2335 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2336 if (SymReaper.isLive(VR)) 2337 AddToWorkList(baseR, &C); 2338 2339 return; 2340 } 2341 2342 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2343 if (SymReaper.isLive(SR->getSymbol())) 2344 AddToWorkList(SR, &C); 2345 else 2346 Postponed.push_back(SR); 2347 2348 return; 2349 } 2350 2351 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2352 AddToWorkList(baseR, &C); 2353 return; 2354 } 2355 2356 // CXXThisRegion in the current or parent location context is live. 2357 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2358 const StackArgumentsSpaceRegion *StackReg = 2359 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2360 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2361 if (CurrentLCtx && 2362 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2363 AddToWorkList(TR, &C); 2364 } 2365 } 2366 2367 void removeDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2368 const ClusterBindings *C) { 2369 if (!C) 2370 return; 2371 2372 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2373 // This means we should continue to track that symbol. 2374 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2375 SymReaper.markLive(SymR->getSymbol()); 2376 2377 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2378 // Element index of a binding key is live. 2379 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2380 2381 VisitBinding(I.getData()); 2382 } 2383 } 2384 2385 void removeDeadBindingsWorker::VisitBinding(SVal V) { 2386 // Is it a LazyCompoundVal? All referenced regions are live as well. 2387 if (Optional<nonloc::LazyCompoundVal> LCS = 2388 V.getAs<nonloc::LazyCompoundVal>()) { 2389 2390 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 2391 2392 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 2393 E = Vals.end(); 2394 I != E; ++I) 2395 VisitBinding(*I); 2396 2397 return; 2398 } 2399 2400 // If V is a region, then add it to the worklist. 2401 if (const MemRegion *R = V.getAsRegion()) { 2402 AddToWorkList(R); 2403 SymReaper.markLive(R); 2404 2405 // All regions captured by a block are also live. 2406 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2407 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2408 E = BR->referenced_vars_end(); 2409 for ( ; I != E; ++I) 2410 AddToWorkList(I.getCapturedRegion()); 2411 } 2412 } 2413 2414 2415 // Update the set of live symbols. 
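  // For example (illustrative only), if the bound value is the symbolic
  // expression '$x + 4', the loop below marks '$x' (and any other symbols the
  // expression mentions) live so it survives dead-binding removal.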
2416 for (SymExpr::symbol_iterator SI = V.symbol_begin(), SE = V.symbol_end(); 2417 SI!=SE; ++SI) 2418 SymReaper.markLive(*SI); 2419 } 2420 2421 bool removeDeadBindingsWorker::UpdatePostponed() { 2422 // See if any postponed SymbolicRegions are actually live now, after 2423 // having done a scan. 2424 bool changed = false; 2425 2426 for (SmallVectorImpl<const SymbolicRegion*>::iterator 2427 I = Postponed.begin(), E = Postponed.end() ; I != E ; ++I) { 2428 if (const SymbolicRegion *SR = *I) { 2429 if (SymReaper.isLive(SR->getSymbol())) { 2430 changed |= AddToWorkList(SR); 2431 *I = nullptr; 2432 } 2433 } 2434 } 2435 2436 return changed; 2437 } 2438 2439 StoreRef RegionStoreManager::removeDeadBindings(Store store, 2440 const StackFrameContext *LCtx, 2441 SymbolReaper& SymReaper) { 2442 RegionBindingsRef B = getRegionBindings(store); 2443 removeDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx); 2444 W.GenerateClusters(); 2445 2446 // Enqueue the region roots onto the worklist. 2447 for (SymbolReaper::region_iterator I = SymReaper.region_begin(), 2448 E = SymReaper.region_end(); I != E; ++I) { 2449 W.AddToWorkList(*I); 2450 } 2451 2452 do W.RunWorkList(); while (W.UpdatePostponed()); 2453 2454 // We have now scanned the store, marking reachable regions and symbols 2455 // as live. We now remove all the regions that are dead from the store 2456 // as well as update DSymbols with the set symbols that are now dead. 2457 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 2458 const MemRegion *Base = I.getKey(); 2459 2460 // If the cluster has been visited, we know the region has been marked. 2461 if (W.isVisited(Base)) 2462 continue; 2463 2464 // Remove the dead entry. 2465 B = B.remove(Base); 2466 2467 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Base)) 2468 SymReaper.maybeDead(SymR->getSymbol()); 2469 2470 // Mark all non-live symbols that this binding references as dead. 2471 const ClusterBindings &Cluster = I.getData(); 2472 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2473 CI != CE; ++CI) { 2474 SVal X = CI.getData(); 2475 SymExpr::symbol_iterator SI = X.symbol_begin(), SE = X.symbol_end(); 2476 for (; SI != SE; ++SI) 2477 SymReaper.maybeDead(*SI); 2478 } 2479 } 2480 2481 return StoreRef(B.asStore(), *this); 2482 } 2483 2484 //===----------------------------------------------------------------------===// 2485 // Utility methods. 2486 //===----------------------------------------------------------------------===// 2487 2488 void RegionStoreManager::print(Store store, raw_ostream &OS, 2489 const char* nl, const char *sep) { 2490 RegionBindingsRef B = getRegionBindings(store); 2491 OS << "Store (direct and default bindings), " 2492 << B.asStore() 2493 << " :" << nl; 2494 B.dump(OS, nl); 2495 } 2496