//== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines a basic region store model. In this model, we do have field
// sensitivity. But we assume nothing about the heap shape. So recursive data
// structures are largely ignored. Basically we do 1-limiting analysis.
// Parameter pointers are assumed with no aliasing. Pointee objects of
// parameters are created lazily.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/Attr.h"
#include "clang/AST/CharUnits.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
#include "clang/Analysis/Analyses/LiveVariables.h"
#include "clang/Analysis/AnalysisDeclContext.h"
#include "clang/Basic/TargetInfo.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h"
#include "llvm/ADT/ImmutableMap.h"
#include "llvm/ADT/Optional.h"
#include "llvm/Support/raw_ostream.h"
#include <utility>

using namespace clang;
using namespace ento;

//===----------------------------------------------------------------------===//
// Representation of binding keys.
//===----------------------------------------------------------------------===//

namespace {
class BindingKey {
public:
  enum Kind { Default = 0x0, Direct = 0x1 };
private:
  enum { Symbolic = 0x2 };

  llvm::PointerIntPair<const MemRegion *, 2> P;
  uint64_t Data;

  /// Create a key for a binding to region \p r, which has a symbolic offset
  /// from region \p Base.
  explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k)
    : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) {
    assert(r && Base && "Must have known regions.");
    assert(getConcreteOffsetRegion() == Base && "Failed to store base region");
  }

  /// Create a key for a binding at \p offset from base region \p r.
  explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k)
    : P(r, k), Data(offset) {
    assert(r && "Must have known regions.");
    assert(getOffset() == offset && "Failed to store offset");
    assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r) ||
            isa<CXXDerivedObjectRegion>(r)) &&
           "Not a base");
  }
public:

  bool isDirect() const { return P.getInt() & Direct; }
  bool hasSymbolicOffset() const { return P.getInt() & Symbolic; }

  const MemRegion *getRegion() const { return P.getPointer(); }
  uint64_t getOffset() const {
    assert(!hasSymbolicOffset());
    return Data;
  }

  const SubRegion *getConcreteOffsetRegion() const {
    assert(hasSymbolicOffset());
    return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data));
  }

  const MemRegion *getBaseRegion() const {
    if (hasSymbolicOffset())
      return getConcreteOffsetRegion()->getBaseRegion();
    return getRegion()->getBaseRegion();
  }

  void Profile(llvm::FoldingSetNodeID& ID) const {
    ID.AddPointer(P.getOpaqueValue());
    ID.AddInteger(Data);
  }

  static BindingKey Make(const MemRegion *R, Kind k);

  bool operator<(const BindingKey &X) const {
    if (P.getOpaqueValue() < X.P.getOpaqueValue())
      return true;
    if (P.getOpaqueValue() > X.P.getOpaqueValue())
      return false;
    return Data < X.Data;
  }

  bool operator==(const BindingKey &X) const {
    return P.getOpaqueValue() == X.P.getOpaqueValue() &&
           Data == X.Data;
  }

  void dump() const;
};
} // end anonymous namespace

BindingKey BindingKey::Make(const MemRegion *R, Kind k) {
  const RegionOffset &RO = R->getAsOffset();
  if (RO.hasSymbolicOffset())
    return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k);

  return BindingKey(RO.getRegion(), RO.getOffset(), k);
}

namespace llvm {
static inline
raw_ostream &operator<<(raw_ostream &os, BindingKey K) {
  os << '(' << K.getRegion();
  if (!K.hasSymbolicOffset())
    os << ',' << K.getOffset();
  os << ',' << (K.isDirect() ? "direct" : "default")
     << ')';
  return os;
}

template <typename T> struct isPodLike;
template <> struct isPodLike<BindingKey> {
  static const bool value = true;
};
} // end llvm namespace

#ifndef NDEBUG
LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; }
#endif

//===----------------------------------------------------------------------===//
// Actual Store type.
//===----------------------------------------------------------------------===//

typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings;
typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef;
typedef std::pair<BindingKey, SVal> BindingPair;

typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings>
        RegionBindings;

namespace {
class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *,
                                 ClusterBindings> {
  ClusterBindings::Factory *CBFactory;

public:
  typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>
          ParentTy;

  RegionBindingsRef(ClusterBindings::Factory &CBFactory,
                    const RegionBindings::TreeTy *T,
                    RegionBindings::TreeTy::Factory *F)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F),
        CBFactory(&CBFactory) {}

  RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory)
      : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P),
        CBFactory(&CBFactory) {}

  RegionBindingsRef add(key_type_ref K, data_type_ref D) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D),
                             *CBFactory);
  }

  RegionBindingsRef remove(key_type_ref K) const {
    return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K),
                             *CBFactory);
  }

  RegionBindingsRef addBinding(BindingKey K, SVal V) const;

  RegionBindingsRef addBinding(const MemRegion *R,
                               BindingKey::Kind k, SVal V) const;

  const SVal *lookup(BindingKey K) const;
  const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const;
  using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup;

  RegionBindingsRef removeBinding(BindingKey K);

  RegionBindingsRef removeBinding(const MemRegion *R,
                                  BindingKey::Kind k);

  RegionBindingsRef removeBinding(const MemRegion *R) {
    return removeBinding(R, BindingKey::Direct).
           removeBinding(R, BindingKey::Default);
  }

  Optional<SVal> getDirectBinding(const MemRegion *R) const;

  /// getDefaultBinding - Returns an SVal* representing an optional default
  /// binding associated with a region and its subregions.
  Optional<SVal> getDefaultBinding(const MemRegion *R) const;

  /// Return the internal tree as a Store.
  Store asStore() const {
    return asImmutableMap().getRootWithoutRetain();
  }

  void dump(raw_ostream &OS, const char *nl) const {
    for (iterator I = begin(), E = end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        OS << ' ' << CI.getKey() << " : " << CI.getData() << nl;
      }
      OS << nl;
    }
  }

  LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); }
};
} // end anonymous namespace

typedef const RegionBindingsRef& RegionBindingsConstRef;

Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const {
  return Optional<SVal>::create(lookup(R, BindingKey::Direct));
}

Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const {
  return Optional<SVal>::create(lookup(R, BindingKey::Default));
}

RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const {
  const MemRegion *Base = K.getBaseRegion();

  const ClusterBindings *ExistingCluster = lookup(Base);
  ClusterBindings Cluster =
      (ExistingCluster ? *ExistingCluster : CBFactory->getEmptyMap());

  ClusterBindings NewCluster = CBFactory->add(Cluster, K, V);
  return add(Base, NewCluster);
}


RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R,
                                                BindingKey::Kind k,
                                                SVal V) const {
  return addBinding(BindingKey::Make(R, k), V);
}

const SVal *RegionBindingsRef::lookup(BindingKey K) const {
  const ClusterBindings *Cluster = lookup(K.getBaseRegion());
  if (!Cluster)
    return nullptr;
  return Cluster->lookup(K);
}

const SVal *RegionBindingsRef::lookup(const MemRegion *R,
                                      BindingKey::Kind k) const {
  return lookup(BindingKey::Make(R, k));
}

RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) {
  const MemRegion *Base = K.getBaseRegion();
  const ClusterBindings *Cluster = lookup(Base);
  if (!Cluster)
    return *this;

  ClusterBindings NewCluster = CBFactory->remove(*Cluster, K);
  if (NewCluster.isEmpty())
    return remove(Base);
  return add(Base, NewCluster);
}

RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R,
                                                   BindingKey::Kind k){
  return removeBinding(BindingKey::Make(R, k));
}

//===----------------------------------------------------------------------===//
// Fine-grained control of RegionStoreManager.
//===----------------------------------------------------------------------===//

namespace {
struct minimal_features_tag {};
struct maximal_features_tag {};

class RegionStoreFeatures {
  bool SupportsFields;
public:
  RegionStoreFeatures(minimal_features_tag) :
    SupportsFields(false) {}

  RegionStoreFeatures(maximal_features_tag) :
    SupportsFields(true) {}

  void enableFields(bool t) { SupportsFields = t; }

  bool supportsFields() const { return SupportsFields; }
};
}

//===----------------------------------------------------------------------===//
// Main RegionStore logic.
//===----------------------------------------------------------------------===//

namespace {
class InvalidateRegionsWorker;

class RegionStoreManager : public StoreManager {
public:
  const RegionStoreFeatures Features;

  RegionBindings::Factory RBFactory;
  mutable ClusterBindings::Factory CBFactory;

  typedef std::vector<SVal> SValListTy;
private:
  typedef llvm::DenseMap<const LazyCompoundValData *,
                         SValListTy> LazyBindingsMapTy;
  LazyBindingsMapTy LazyBindingsMap;

  /// The largest number of fields a struct can have and still be
  /// considered "small".
  ///
  /// This is currently used to decide whether or not it is worth "forcing" a
  /// LazyCompoundVal on bind.
  ///
  /// This is controlled by 'region-store-small-struct-limit' option.
  /// To disable all small-struct-dependent behavior, set the option to "0".
  unsigned SmallStructLimit;

  /// A helper used to populate the work list with the given set of
  /// regions.
  void populateWorkList(InvalidateRegionsWorker &W,
                        ArrayRef<SVal> Values,
                        InvalidatedRegions *TopLevelRegions);

public:
  RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f)
      : StoreManager(mgr), Features(f),
        RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()),
        SmallStructLimit(0) {
    SubEngine &Eng = StateMgr.getOwningEngine();
    AnalyzerOptions &Options = Eng.getAnalysisManager().options;
    SmallStructLimit = Options.RegionStoreSmallStructLimit;
  }


  /// setImplicitDefaultValue - Set the default binding for the provided
  /// MemRegion to the value implicitly defined for compound literals when
  /// the value is not specified.
  RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B,
                                            const MemRegion *R, QualType T);

  /// ArrayToPointer - Emulates the "decay" of an array to a pointer
  /// type. 'Array' represents the lvalue of the array being decayed
  /// to a pointer, and the returned SVal represents the decayed
  /// version of that lvalue (i.e., a pointer to the first element of
  /// the array). This is called by ExprEngine when evaluating
  /// casts from arrays to pointers.
  SVal ArrayToPointer(Loc Array, QualType ElementTy) override;

  StoreRef getInitialStore(const LocationContext *InitLoc) override {
    return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this);
  }

  //===-------------------------------------------------------------------===//
  // Binding values to regions.
  //===-------------------------------------------------------------------===//
  RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated);

  StoreRef invalidateRegions(Store store,
                             ArrayRef<SVal> Values,
                             const Expr *E, unsigned Count,
                             const LocationContext *LCtx,
                             const CallEvent *Call,
                             InvalidatedSymbols &IS,
                             RegionAndSymbolInvalidationTraits &ITraits,
                             InvalidatedRegions *Invalidated,
                             InvalidatedRegions *InvalidatedTopLevel) override;

  bool scanReachableSymbols(Store S, const MemRegion *R,
                            ScanReachableSymbols &Callbacks) override;

  RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *R);

public: // Part of public interface to class.

  StoreRef Bind(Store store, Loc LV, SVal V) override {
    return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this);
  }

  RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V);

  // BindDefaultInitial is only used to initialize a region with
  // a default value.
  StoreRef BindDefaultInitial(Store store, const MemRegion *R,
                              SVal V) override {
    RegionBindingsRef B = getRegionBindings(store);
    // Use other APIs when you have to wipe the region that was initialized
    // earlier.
    assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) &&
           "Double initialization!");
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  // BindDefaultZero is used for zeroing constructors that may accidentally
  // overwrite existing bindings.
  StoreRef BindDefaultZero(Store store, const MemRegion *R) override {
    // FIXME: The offsets of empty bases can be tricky because of
    // the so-called "empty base class optimization".
    // If a base class has been optimized out
    // we should not try to create a binding, otherwise we should.
    // Unfortunately, at the moment ASTRecordLayout doesn't expose
    // the actual sizes of the empty bases
    // and trying to infer them from offsets/alignments
    // seems to be error-prone and non-trivial because of the trailing padding.
    // As a temporary mitigation we don't create bindings for empty bases.
    if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R))
      if (BR->getDecl()->isEmpty())
        return StoreRef(store, *this);

    RegionBindingsRef B = getRegionBindings(store);
    SVal V = svalBuilder.makeZeroVal(Ctx.CharTy);
    B = removeSubRegionBindings(B, cast<SubRegion>(R));
    B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V);
    return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this);
  }

  /// Attempt to extract the fields of \p LCV and bind them to the struct region
  /// \p R.
  ///
  /// This path is used when it seems advantageous to "force" loading the values
  /// within a LazyCompoundVal to bind memberwise to the struct region, rather
  /// than using a Default binding at the base of the entire region. This is a
  /// heuristic attempting to avoid building long chains of LazyCompoundVals.
  ///
  /// \returns The updated store bindings, or \c None if binding non-lazily
  ///          would be too expensive.
  Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B,
                                                 const TypedValueRegion *R,
                                                 const RecordDecl *RD,
                                                 nonloc::LazyCompoundVal LCV);

  /// BindStruct - Bind a compound value to a structure.
  RegionBindingsRef bindStruct(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  /// BindVector - Bind a compound value to a vector.
  RegionBindingsRef bindVector(RegionBindingsConstRef B,
                               const TypedValueRegion* R, SVal V);

  RegionBindingsRef bindArray(RegionBindingsConstRef B,
                              const TypedValueRegion* R,
                              SVal V);

  /// Clears out all bindings in the given region and assigns a new value
  /// as a Default binding.
  RegionBindingsRef bindAggregate(RegionBindingsConstRef B,
                                  const TypedRegion *R,
                                  SVal DefaultVal);

  /// Create a new store with the specified binding removed.
  /// \param ST the original store, that is the basis for the new store.
  /// \param L the location whose binding should be removed.
  StoreRef killBinding(Store ST, Loc L) override;

  void incrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRetain();
  }

  /// If the StoreManager supports it, decrement the reference count of
  /// the specified Store object. If the reference count hits 0, the memory
  /// associated with the object is recycled.
  void decrementReferenceCount(Store store) override {
    getRegionBindings(store).manualRelease();
  }

  bool includedInBindings(Store store, const MemRegion *region) const override;

  /// Return the value bound to specified location in a given state.
  ///
  /// The high level logic for this method is this:
  /// getBinding (L)
  ///   if L has binding
  ///     return L's binding
  ///   else if L is in killset
  ///     return unknown
  ///   else
  ///     if L is on stack or heap
  ///       return undefined
  ///     else
  ///       return symbolic
  SVal getBinding(Store S, Loc L, QualType T) override {
    return getBinding(getRegionBindings(S), L, T);
  }

  Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override {
    RegionBindingsRef B = getRegionBindings(S);
    // Default bindings are always applied over a base region so look up the
    // base region's default binding, otherwise the lookup will fail when R
    // is at an offset from R->getBaseRegion().
    return B.getDefaultBinding(R->getBaseRegion());
  }

  SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType());

  SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R);

  SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R);

  SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R);

  SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R);

  SVal getBindingForLazySymbol(const TypedValueRegion *R);

  SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B,
                                         const TypedValueRegion *R,
                                         QualType Ty);

  SVal getLazyBinding(const SubRegion *LazyBindingRegion,
                      RegionBindingsRef LazyBinding);

  /// Get bindings for the values in a struct and return a CompoundVal, used
  /// when doing struct copy:
  /// struct s x, y;
  /// x = y;
  /// y's value is retrieved by this method.
  SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R);
  SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R);
  NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R);

  /// Used to lazily generate derived symbols for bindings that are defined
  /// implicitly by default bindings in a super region.
  ///
  /// Note that callers may need to specially handle LazyCompoundVals, which
  /// are returned as is in case the caller needs to treat them differently.
  Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B,
                                                  const MemRegion *superR,
                                                  const TypedValueRegion *R,
                                                  QualType Ty);

  /// Get the state and region whose binding this region \p R corresponds to.
  ///
  /// If there is no lazy binding for \p R, the returned value will have a null
  /// \c second. Note that a null pointer can represent a valid Store.
  std::pair<Store, const SubRegion *>
  findLazyBinding(RegionBindingsConstRef B, const SubRegion *R,
                  const SubRegion *originalRegion);

  /// Returns the cached set of interesting SVals contained within a lazy
  /// binding.
  ///
  /// The precise value of "interesting" is determined for the purposes of
  /// RegionStore's internal analysis. It must always contain all regions and
  /// symbols, but may omit constants and other kinds of SVal.
  const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV);

  //===------------------------------------------------------------------===//
  // State pruning.
  //===------------------------------------------------------------------===//

  /// removeDeadBindings - Scans the RegionStore of 'state' for dead values.
  /// It returns a new Store with these values removed.
  StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx,
                              SymbolReaper& SymReaper) override;

  //===------------------------------------------------------------------===//
  // Region "extents".
  //===------------------------------------------------------------------===//

  // FIXME: This method will soon be eliminated; see the note in Store.h.
  DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state,
                                         const MemRegion* R,
                                         QualType EleTy) override;

  //===------------------------------------------------------------------===//
  // Utility methods.
  //===------------------------------------------------------------------===//

  RegionBindingsRef getRegionBindings(Store store) const {
    return RegionBindingsRef(CBFactory,
                             static_cast<const RegionBindings::TreeTy*>(store),
                             RBFactory.getTreeFactory());
  }

  void print(Store store, raw_ostream &Out, const char* nl) override;

  void iterBindings(Store store, BindingsHandler& f) override {
    RegionBindingsRef B = getRegionBindings(store);
    for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) {
      const ClusterBindings &Cluster = I.getData();
      for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end();
           CI != CE; ++CI) {
        const BindingKey &K = CI.getKey();
        if (!K.isDirect())
          continue;
        if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) {
          // FIXME: Possibly incorporate the offset?
          if (!f.HandleBinding(*this, store, R, CI.getData()))
            return;
        }
      }
    }
  }
};

} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RegionStore creation.
//===----------------------------------------------------------------------===//

std::unique_ptr<StoreManager>
ento::CreateRegionStoreManager(ProgramStateManager &StMgr) {
  RegionStoreFeatures F = maximal_features_tag();
  return llvm::make_unique<RegionStoreManager>(StMgr, F);
}

std::unique_ptr<StoreManager>
ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) {
  RegionStoreFeatures F = minimal_features_tag();
  F.enableFields(true);
  return llvm::make_unique<RegionStoreManager>(StMgr, F);
}


//===----------------------------------------------------------------------===//
// Region Cluster analysis.
//===----------------------------------------------------------------------===//

namespace {
/// Used to determine which global regions are automatically included in the
/// initial worklist of a ClusterAnalysis.
enum GlobalsFilterKind {
  /// Don't include any global regions.
  GFK_None,
  /// Only include system globals.
  GFK_SystemOnly,
  /// Include all global regions.
  GFK_All
};

template <typename DERIVED>
class ClusterAnalysis {
protected:
  typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap;
  typedef const MemRegion * WorkListElement;
  typedef SmallVector<WorkListElement, 10> WorkList;

  llvm::SmallPtrSet<const ClusterBindings *, 16> Visited;

  WorkList WL;

  RegionStoreManager &RM;
  ASTContext &Ctx;
  SValBuilder &svalBuilder;

  RegionBindingsRef B;


protected:
  const ClusterBindings *getCluster(const MemRegion *R) {
    return B.lookup(R);
  }

  /// Returns true if all clusters in the given memspace should be initially
  /// included in the cluster analysis. Subclasses may provide their
  /// own implementation.
  bool includeEntireMemorySpace(const MemRegion *Base) {
    return false;
  }

public:
  ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr,
                  RegionBindingsRef b)
      : RM(rm), Ctx(StateMgr.getContext()),
        svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {}

  RegionBindingsRef getRegionBindings() const { return B; }

  bool isVisited(const MemRegion *R) {
    return Visited.count(getCluster(R));
  }

  void GenerateClusters() {
    // Scan the entire set of bindings and record the region clusters.
    for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end();
         RI != RE; ++RI){
      const MemRegion *Base = RI.getKey();

      const ClusterBindings &Cluster = RI.getData();
      assert(!Cluster.isEmpty() && "Empty clusters should be removed");
      static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster);

      // If the base's memspace should be entirely invalidated, add the cluster
      // to the work list up front.
      if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base))
        AddToWorkList(WorkListElement(Base), &Cluster);
    }
  }

  bool AddToWorkList(WorkListElement E, const ClusterBindings *C) {
    if (C && !Visited.insert(C).second)
      return false;
    WL.push_back(E);
    return true;
  }

  bool AddToWorkList(const MemRegion *R) {
    return static_cast<DERIVED*>(this)->AddToWorkList(R);
  }

  void RunWorkList() {
    while (!WL.empty()) {
      WorkListElement E = WL.pop_back_val();
      const MemRegion *BaseR = E;

      static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR));
    }
  }

  void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {}
  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {}

  void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C,
                    bool Flag) {
    static_cast<DERIVED*>(this)->VisitCluster(BaseR, C);
  }
};
}

//===----------------------------------------------------------------------===//
// Binding invalidation.
//===----------------------------------------------------------------------===//

bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R,
                                              ScanReachableSymbols &Callbacks) {
  assert(R == R->getBaseRegion() && "Should only be called for base regions");
  RegionBindingsRef B = getRegionBindings(S);
  const ClusterBindings *Cluster = B.lookup(R);

  if (!Cluster)
    return true;

  for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end();
       RI != RE; ++RI) {
    if (!Callbacks.scan(RI.getData()))
      return false;
  }

  return true;
}

static inline bool isUnionField(const FieldRegion *FR) {
  return FR->getDecl()->getParent()->isUnion();
}

typedef SmallVector<const FieldDecl *, 8> FieldVector;

static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  const MemRegion *Base = K.getConcreteOffsetRegion();
  const MemRegion *R = K.getRegion();

  while (R != Base) {
    if (const FieldRegion *FR = dyn_cast<FieldRegion>(R))
      if (!isUnionField(FR))
        Fields.push_back(FR->getDecl());

    R = cast<SubRegion>(R)->getSuperRegion();
  }
}

static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) {
  assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys");

  if (Fields.empty())
    return true;

  FieldVector FieldsInBindingKey;
  getSymbolicOffsetFields(K, FieldsInBindingKey);

  ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size();
  if (Delta >= 0)
    return std::equal(FieldsInBindingKey.begin() + Delta,
                      FieldsInBindingKey.end(),
                      Fields.begin());
  else
    return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(),
                      Fields.begin() - Delta);
}

/// Collects all bindings in \p Cluster that may refer to bindings within
/// \p Top.
///
/// Each binding is a pair whose \c first is the key (a BindingKey) and whose
/// \c second is the value (an SVal).
///
/// The \p IncludeAllDefaultBindings parameter specifies whether to include
/// default bindings that may extend beyond \p Top itself, e.g. if \p Top is
/// an aggregate within a larger aggregate with a default binding.
static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, BindingKey TopKey,
                         bool IncludeAllDefaultBindings) {
  FieldVector FieldsInSymbolicSubregions;
  if (TopKey.hasSymbolicOffset()) {
    getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions);
    Top = TopKey.getConcreteOffsetRegion();
    TopKey = BindingKey::Make(Top, BindingKey::Default);
  }

  // Find the length (in bits) of the region being invalidated.
  uint64_t Length = UINT64_MAX;
  SVal Extent = Top->getExtent(SVB);
  if (Optional<nonloc::ConcreteInt> ExtentCI =
          Extent.getAs<nonloc::ConcreteInt>()) {
    const llvm::APSInt &ExtentInt = ExtentCI->getValue();
    assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned());
    // Extents are in bytes but region offsets are in bits. Be careful!
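    // (For example, with 8-bit chars a 4-byte extent yields a 32-bit Length,
    // matching the bit offsets stored in BindingKeys.)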
    Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth();
  } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) {
    if (FR->getDecl()->isBitField())
      Length = FR->getDecl()->getBitWidthValue(SVB.getContext());
  }

  for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end();
       I != E; ++I) {
    BindingKey NextKey = I.getKey();
    if (NextKey.getRegion() == TopKey.getRegion()) {
      // FIXME: This doesn't catch the case where we're really invalidating a
      // region with a symbolic offset. Example:
      //   R: points[i].y
      //   Next: points[0].x

      if (NextKey.getOffset() > TopKey.getOffset() &&
          NextKey.getOffset() - TopKey.getOffset() < Length) {
        // Case 1: The next binding is inside the region we're invalidating.
        // Include it.
        Bindings.push_back(*I);

      } else if (NextKey.getOffset() == TopKey.getOffset()) {
        // Case 2: The next binding is at the same offset as the region we're
        // invalidating. In this case, we need to leave default bindings alone,
        // since they may be providing a default value for regions beyond what
        // we're invalidating.
        // FIXME: This is probably incorrect; consider invalidating an outer
        // struct whose first field is bound to a LazyCompoundVal.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          Bindings.push_back(*I);
      }

    } else if (NextKey.hasSymbolicOffset()) {
      const MemRegion *Base = NextKey.getConcreteOffsetRegion();
      if (Top->isSubRegionOf(Base) && Top != Base) {
        // Case 3: The next key is symbolic and we just changed something within
        // its concrete region. We don't know if the binding is still valid, so
        // we'll be conservative and include it.
        if (IncludeAllDefaultBindings || NextKey.isDirect())
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) {
        // Case 4: The next key is symbolic, but we changed a known
        // super-region. In this case the binding is certainly included.
        if (BaseSR->isSubRegionOf(Top))
          if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions))
            Bindings.push_back(*I);
      }
    }
  }
}

static void
collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings,
                         SValBuilder &SVB, const ClusterBindings &Cluster,
                         const SubRegion *Top, bool IncludeAllDefaultBindings) {
  collectSubRegionBindings(Bindings, SVB, Cluster, Top,
                           BindingKey::Make(Top, BindingKey::Default),
                           IncludeAllDefaultBindings);
}

RegionBindingsRef
RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B,
                                            const SubRegion *Top) {
  BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default);
  const MemRegion *ClusterHead = TopKey.getBaseRegion();

  if (Top == ClusterHead) {
    // We can remove an entire cluster's bindings all in one go.
    return B.remove(Top);
  }

  const ClusterBindings *Cluster = B.lookup(ClusterHead);
  if (!Cluster) {
    // If we're invalidating a region with a symbolic offset, we need to make
    // sure we don't treat the base region as uninitialized anymore.
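    // Adding a default binding of UnknownVal to the concrete parent region
    // keeps later reads of it from appearing uninitialized.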
    if (TopKey.hasSymbolicOffset()) {
      const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
      return B.addBinding(Concrete, BindingKey::Default, UnknownVal());
    }
    return B;
  }

  SmallVector<BindingPair, 32> Bindings;
  collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey,
                           /*IncludeAllDefaultBindings=*/false);

  ClusterBindingsRef Result(*Cluster, CBFactory);
  for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(),
                                                    E = Bindings.end();
       I != E; ++I)
    Result = Result.remove(I->first);

  // If we're invalidating a region with a symbolic offset, we need to make sure
  // we don't treat the base region as uninitialized anymore.
  // FIXME: This isn't very precise; see the example in
  // collectSubRegionBindings.
  if (TopKey.hasSymbolicOffset()) {
    const SubRegion *Concrete = TopKey.getConcreteOffsetRegion();
    Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default),
                        UnknownVal());
  }

  if (Result.isEmpty())
    return B.remove(ClusterHead);
  return B.add(ClusterHead, Result.asImmutableMap());
}

namespace {
class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker>
{
  const Expr *Ex;
  unsigned Count;
  const LocationContext *LCtx;
  InvalidatedSymbols &IS;
  RegionAndSymbolInvalidationTraits &ITraits;
  StoreManager::InvalidatedRegions *Regions;
  GlobalsFilterKind GlobalsFilter;
public:
  InvalidateRegionsWorker(RegionStoreManager &rm,
                          ProgramStateManager &stateMgr,
                          RegionBindingsRef b,
                          const Expr *ex, unsigned count,
                          const LocationContext *lctx,
                          InvalidatedSymbols &is,
                          RegionAndSymbolInvalidationTraits &ITraitsIn,
                          StoreManager::InvalidatedRegions *r,
                          GlobalsFilterKind GFK)
      : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b),
        Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r),
        GlobalsFilter(GFK) {}

  void VisitCluster(const MemRegion *baseR, const ClusterBindings *C);
  void VisitBinding(SVal V);

  using ClusterAnalysis::AddToWorkList;

  bool AddToWorkList(const MemRegion *R);

  /// Returns true if all clusters in the memory space for \p Base should be
  /// invalidated.
  bool includeEntireMemorySpace(const MemRegion *Base);

  /// Returns true if the memory space of the given region is one of the global
  /// regions specially included at the start of invalidation.
  bool isInitiallyIncludedGlobalRegion(const MemRegion *R);
};
}

bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) {
  bool doNotInvalidateSuperRegion = ITraits.hasTrait(
      R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);
  const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion();
  return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR));
}

void InvalidateRegionsWorker::VisitBinding(SVal V) {
  // A symbol? Mark it touched by the invalidation.
  if (SymbolRef Sym = V.getAsSymbol())
    IS.insert(Sym);

  if (const MemRegion *R = V.getAsRegion()) {
    AddToWorkList(R);
    return;
  }

  // Is it a LazyCompoundVal? All references get invalidated as well.
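  // (A LazyCompoundVal is a snapshot of an entire aggregate; every region and
  // symbol reachable from that snapshot must be treated as touched.)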
  if (Optional<nonloc::LazyCompoundVal> LCS =
          V.getAs<nonloc::LazyCompoundVal>()) {

    const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS);

    for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(),
                                                        E = Vals.end();
         I != E; ++I)
      VisitBinding(*I);

    return;
  }
}

void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR,
                                           const ClusterBindings *C) {

  bool PreserveRegionsContents =
      ITraits.hasTrait(baseR,
                       RegionAndSymbolInvalidationTraits::TK_PreserveContents);

  if (C) {
    for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I)
      VisitBinding(I.getData());

    // Invalidate regions contents.
    if (!PreserveRegionsContents)
      B = B.remove(baseR);
  }

  if (const auto *TO = dyn_cast<TypedValueRegion>(baseR)) {
    if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) {

      // Lambdas can affect all static local variables without explicitly
      // capturing them.
      // We invalidate all static locals referenced inside the lambda body.
      if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) {
        using namespace ast_matchers;

        const char *DeclBind = "DeclBind";
        StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr(
              to(varDecl(hasStaticStorageDuration()).bind(DeclBind)))));
        auto Matches =
            match(RefToStatic, *RD->getLambdaCallOperator()->getBody(),
                  RD->getASTContext());

        for (BoundNodes &Match : Matches) {
          auto *VD = Match.getNodeAs<VarDecl>(DeclBind);
          const VarRegion *ToInvalidate =
              RM.getRegionManager().getVarRegion(VD, LCtx);
          AddToWorkList(ToInvalidate);
        }
      }
    }
  }

  // BlockDataRegion? If so, invalidate captured variables that are passed
  // by reference.
  if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) {
    for (BlockDataRegion::referenced_vars_iterator
         BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ;
         BI != BE; ++BI) {
      const VarRegion *VR = BI.getCapturedRegion();
      const VarDecl *VD = VR->getDecl();
      if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) {
        AddToWorkList(VR);
      }
      else if (Loc::isLocType(VR->getValueType())) {
        // Map the current bindings to a Store to retrieve the value
        // of the binding. If that binding itself is a region, we should
        // invalidate that region. This is because a block may capture
        // a pointer value, but the thing pointed by that pointer may
        // get invalidated.
        SVal V = RM.getBinding(B, loc::MemRegionVal(VR));
        if (Optional<Loc> L = V.getAs<Loc>()) {
          if (const MemRegion *LR = L->getAsRegion())
            AddToWorkList(LR);
        }
      }
    }
    return;
  }

  // Symbolic region?
  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR))
    IS.insert(SR->getSymbol());

  // Nothing else should be done when we preserve the region's contents.
  if (PreserveRegionsContents)
    return;

  // Otherwise, we have a normal data region. Record that we touched the region.
  if (Regions)
    Regions->push_back(baseR);

  if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
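    // (Alloca and symbolic regions carry no value type of their own, so
    // Ctx.IntTy merely serves as a placeholder type for the conjured symbol.)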
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (!baseR->isBoundable())
    return;

  const TypedValueRegion *TR = cast<TypedValueRegion>(baseR);
  QualType T = TR->getValueType();

  if (isInitiallyIncludedGlobalRegion(baseR)) {
    // If the region is a global and we are invalidating all globals,
    // erasing the entry is good enough. This causes all globals to be lazily
    // symbolicated from the same base symbol.
    return;
  }

  if (T->isRecordType()) {
    // Invalidate the region by setting its default value to
    // conjured symbol. The type of the symbol is irrelevant.
    DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                          Ctx.IntTy, Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  if (const ArrayType *AT = Ctx.getAsArrayType(T)) {
    bool doNotInvalidateSuperRegion = ITraits.hasTrait(
        baseR,
        RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion);

    if (doNotInvalidateSuperRegion) {
      // We are not doing blank invalidation of the whole array region so we
      // have to manually invalidate each element.
      Optional<uint64_t> NumElements;

      // Compute lower and upper offsets for region within array.
      if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT))
        NumElements = CAT->getSize().getZExtValue();
      if (!NumElements) // We are not dealing with a constant size array
        goto conjure_default;
      QualType ElementTy = AT->getElementType();
      uint64_t ElemSize = Ctx.getTypeSize(ElementTy);
      const RegionOffset &RO = baseR->getAsOffset();
      const MemRegion *SuperR = baseR->getBaseRegion();
      if (RO.hasSymbolicOffset()) {
        // If base region has a symbolic offset,
        // we revert to invalidating the super region.
        if (SuperR)
          AddToWorkList(SuperR);
        goto conjure_default;
      }

      uint64_t LowerOffset = RO.getOffset();
      uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize;
      bool UpperOverflow = UpperOffset < LowerOffset;

      // Invalidate regions which are within array boundaries,
      // or have a symbolic offset.
      if (!SuperR)
        goto conjure_default;

      const ClusterBindings *C = B.lookup(SuperR);
      if (!C)
        goto conjure_default;

      for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E;
           ++I) {
        const BindingKey &BK = I.getKey();
        Optional<uint64_t> ROffset =
            BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset();

        // Check offset is not symbolic and within array's boundaries.
        // Handles arrays of 0 elements and of 0-sized elements as well.
        if (!ROffset ||
            ((*ROffset >= LowerOffset && *ROffset < UpperOffset) ||
             (UpperOverflow &&
              (*ROffset >= LowerOffset || *ROffset < UpperOffset)) ||
             (LowerOffset == UpperOffset && *ROffset == LowerOffset))) {
          B = B.removeBinding(I.getKey());
          // Bound symbolic regions need to be invalidated for dead symbol
          // detection.
          SVal V = I.getData();
          const MemRegion *R = V.getAsRegion();
          if (R && isa<SymbolicRegion>(R))
            VisitBinding(V);
        }
      }
    }
  conjure_default:
    // Set the default value of the array to conjured symbol.
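    // (Elements without an explicit binding will later derive their values
    // from this default binding.)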
    DefinedOrUnknownSVal V =
        svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                     AT->getElementType(), Count);
    B = B.addBinding(baseR, BindingKey::Default, V);
    return;
  }

  DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx,
                                                        T,Count);
  assert(SymbolManager::canSymbolicate(T) || V.isUnknown());
  B = B.addBinding(baseR, BindingKey::Direct, V);
}

bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion(
    const MemRegion *R) {
  switch (GlobalsFilter) {
  case GFK_None:
    return false;
  case GFK_SystemOnly:
    return isa<GlobalSystemSpaceRegion>(R->getMemorySpace());
  case GFK_All:
    return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace());
  }

  llvm_unreachable("unknown globals filter");
}

bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) {
  if (isInitiallyIncludedGlobalRegion(Base))
    return true;

  const MemSpaceRegion *MemSpace = Base->getMemorySpace();
  return ITraits.hasTrait(MemSpace,
                          RegionAndSymbolInvalidationTraits::TK_EntireMemSpace);
}

RegionBindingsRef
RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K,
                                           const Expr *Ex,
                                           unsigned Count,
                                           const LocationContext *LCtx,
                                           RegionBindingsRef B,
                                           InvalidatedRegions *Invalidated) {
  // Bind the globals memory space to a new symbol that we will use to derive
  // the bindings for all globals.
  const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K);
  SVal V = svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx,
                                        /* type does not matter */ Ctx.IntTy,
                                        Count);

  B = B.removeBinding(GS)
       .addBinding(BindingKey::Make(GS, BindingKey::Default), V);

  // Even if there are no bindings in the global scope, we still need to
  // record that we touched it.
  if (Invalidated)
    Invalidated->push_back(GS);

  return B;
}

void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W,
                                          ArrayRef<SVal> Values,
                                          InvalidatedRegions *TopLevelRegions) {
  for (ArrayRef<SVal>::iterator I = Values.begin(),
                                E = Values.end(); I != E; ++I) {
    SVal V = *I;
    if (Optional<nonloc::LazyCompoundVal> LCS =
            V.getAs<nonloc::LazyCompoundVal>()) {

      const SValListTy &Vals = getInterestingValues(*LCS);

      for (SValListTy::const_iterator I = Vals.begin(),
                                      E = Vals.end(); I != E; ++I) {
        // Note: the last argument is false here because these are
        // non-top-level regions.
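        // (Regions reachable only through the LazyCompoundVal are added to
        // the work list but are not reported in TopLevelRegions.)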
        if (const MemRegion *R = (*I).getAsRegion())
          W.AddToWorkList(R);
      }
      continue;
    }

    if (const MemRegion *R = V.getAsRegion()) {
      if (TopLevelRegions)
        TopLevelRegions->push_back(R);
      W.AddToWorkList(R);
      continue;
    }
  }
}

StoreRef
RegionStoreManager::invalidateRegions(Store store,
                                      ArrayRef<SVal> Values,
                                      const Expr *Ex, unsigned Count,
                                      const LocationContext *LCtx,
                                      const CallEvent *Call,
                                      InvalidatedSymbols &IS,
                                      RegionAndSymbolInvalidationTraits &ITraits,
                                      InvalidatedRegions *TopLevelRegions,
                                      InvalidatedRegions *Invalidated) {
  GlobalsFilterKind GlobalsFilter;
  if (Call) {
    if (Call->isInSystemHeader())
      GlobalsFilter = GFK_SystemOnly;
    else
      GlobalsFilter = GFK_All;
  } else {
    GlobalsFilter = GFK_None;
  }

  RegionBindingsRef B = getRegionBindings(store);
  InvalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits,
                            Invalidated, GlobalsFilter);

  // Scan the bindings and generate the clusters.
  W.GenerateClusters();

  // Add the regions to the worklist.
  populateWorkList(W, Values, TopLevelRegions);

  W.RunWorkList();

  // Return the new bindings.
  B = W.getRegionBindings();

  // For calls, determine which global regions should be invalidated and
  // invalidate them. (Note that function-static and immutable globals are never
  // invalidated by this.)
  // TODO: This could possibly be more precise with modules.
  switch (GlobalsFilter) {
  case GFK_All:
    B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    LLVM_FALLTHROUGH;
  case GFK_SystemOnly:
    B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind,
                               Ex, Count, LCtx, B, Invalidated);
    LLVM_FALLTHROUGH;
  case GFK_None:
    break;
  }

  return StoreRef(B.asStore(), *this);
}

//===----------------------------------------------------------------------===//
// Extents for regions.
//===----------------------------------------------------------------------===//

DefinedOrUnknownSVal
RegionStoreManager::getSizeInElements(ProgramStateRef state,
                                      const MemRegion *R,
                                      QualType EleTy) {
  SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder);
  const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size);
  if (!SizeInt)
    return UnknownVal();

  CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue());

  if (Ctx.getAsVariableArrayType(EleTy)) {
    // FIXME: We need to track extra state to properly record the size
    // of VLAs. Returning UnknownVal here, however, is a stop-gap so that
    // we don't have a divide-by-zero below.
    return UnknownVal();
  }

  CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy);

  // If a variable is reinterpreted as a type that doesn't fit into a larger
  // type evenly, round it down.
  // This is a signed value, since it's used in arithmetic with signed indices.
  return svalBuilder.makeIntVal(RegionSize / EleSize,
                                svalBuilder.getArrayIndexType());
}

//===----------------------------------------------------------------------===//
// Location and region casting.
//===----------------------------------------------------------------------===//

/// ArrayToPointer - Emulates the "decay" of an array to a pointer
/// type. 'Array' represents the lvalue of the array being decayed
/// to a pointer, and the returned SVal represents the decayed
/// version of that lvalue (i.e., a pointer to the first element of
/// the array). This is called by ExprEngine when evaluating casts
/// from arrays to pointers.
SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) {
  if (Array.getAs<loc::ConcreteInt>())
    return Array;

  if (!Array.getAs<loc::MemRegionVal>())
    return UnknownVal();

  const SubRegion *R =
      cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion());
  NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex();
  return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx));
}

//===----------------------------------------------------------------------===//
// Loading values from regions.
//===----------------------------------------------------------------------===//

SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) {
  assert(!L.getAs<UnknownVal>() && "location unknown");
  assert(!L.getAs<UndefinedVal>() && "location undefined");

  // For access to concrete addresses, return UnknownVal. Checks
  // for null dereferences (and similar errors) are done by checkers, not
  // the Store.
  // FIXME: We can consider lazily symbolicating such memory, but we really
  // should defer this when we can reason easily about symbolicating arrays
  // of bytes.
  if (L.getAs<loc::ConcreteInt>()) {
    return UnknownVal();
  }
  if (!L.getAs<loc::MemRegionVal>()) {
    return UnknownVal();
  }

  const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion();

  if (isa<BlockDataRegion>(MR)) {
    return UnknownVal();
  }

  if (!isa<TypedValueRegion>(MR)) {
    if (T.isNull()) {
      if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR))
        T = TR->getLocationType()->getPointeeType();
      else if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR))
        T = SR->getSymbol()->getType()->getPointeeType();
    }
    assert(!T.isNull() && "Unable to auto-detect binding type!");
    assert(!T->isVoidType() && "Attempting to dereference a void pointer!");
    MR = GetElementZeroRegion(cast<SubRegion>(MR), T);
  } else {
    T = cast<TypedValueRegion>(MR)->getValueType();
  }

  // FIXME: Perhaps this method should just take a 'const MemRegion*' argument
  // instead of 'Loc', and have the other Loc cases handled at a higher level.
  const TypedValueRegion *R = cast<TypedValueRegion>(MR);
  QualType RTy = R->getValueType();

  // FIXME: we do not yet model the parts of a complex type, so treat the
  // whole thing as "unknown".
  if (RTy->isAnyComplexType())
    return UnknownVal();

  // FIXME: We should eventually handle funny addressing. e.g.:
  //
  //  int x = ...;
  //  int *p = &x;
  //  char *q = (char*) p;
  //  char c = *q;  // returns the first byte of 'x'.
  //
  // Such funny addressing will occur due to layering of regions.
  if (RTy->isStructureOrClassType())
    return getBindingForStruct(B, R);

  // FIXME: Handle unions.
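  // For now, a load from a union is modeled as a LazyCompoundVal covering the
  // whole union region rather than a per-member value.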
  if (RTy->isUnionType())
    return createLazyBinding(B, R);

  if (RTy->isArrayType()) {
    if (RTy->isConstantArrayType())
      return getBindingForArray(B, R);
    else
      return UnknownVal();
  }

  // FIXME: handle Vector types.
  if (RTy->isVectorType())
    return UnknownVal();

  if (const FieldRegion* FR = dyn_cast<FieldRegion>(R))
    return CastRetrievedVal(getBindingForField(B, FR), FR, T);

  if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the element type. Eventually we want to compose these values
    // more intelligently. For example, an 'element' can encompass multiple
    // bound regions (e.g., several bound bytes), or could be a subset of
    // a larger value.
    return CastRetrievedVal(getBindingForElement(B, ER), ER, T);
  }

  if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the ivar type. What we should model is stores to ivars
    // that blow past the extent of the ivar. If the address of the ivar is
    // reinterpreted, it is possible we stored a different value that could
    // fit within the ivar. Either we need to cast these when storing them
    // or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T);
  }

  if (const VarRegion *VR = dyn_cast<VarRegion>(R)) {
    // FIXME: Here we actually perform an implicit conversion from the loaded
    // value to the variable type. What we should model is stores to variables
    // that blow past the extent of the variable. If the address of the
    // variable is reinterpreted, it is possible we stored a different value
    // that could fit within the variable. Either we need to cast these when
    // storing them or reinterpret them lazily (as we do here).
    return CastRetrievedVal(getBindingForVar(B, VR), VR, T);
  }

  const SVal *V = B.lookup(R, BindingKey::Direct);

  // Check if the region has a binding.
  if (V)
    return *V;

  // The location does not have a bound value. This means that it has
  // the value it had upon its creation and/or entry to the analyzed
  // function/method. These are either symbolic values or 'undefined'.
  if (R->hasStackNonParametersStorage()) {
    // All stack variables are considered to have undefined values
    // upon creation. All heap allocated blocks are considered to
    // have undefined values as well unless they are explicitly bound
    // to specific values.
    return UndefinedVal();
  }

  // All other values are symbolic.
  return svalBuilder.getRegionValueSymbolVal(R);
}

static QualType getUnderlyingType(const SubRegion *R) {
  QualType RegionTy;
  if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R))
    RegionTy = TVR->getValueType();

  if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R))
    RegionTy = SR->getSymbol()->getType();

  return RegionTy;
}

/// Checks to see if store \p B has a lazy binding for region \p R.
///
/// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected
/// if there are additional bindings within \p R.
1542 ///
1543 /// Note that unlike RegionStoreManager::findLazyBinding, this will not search
1544 /// for lazy bindings for super-regions of \p R.
1545 static Optional<nonloc::LazyCompoundVal>
1546 getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
1547 const SubRegion *R, bool AllowSubregionBindings) {
1548 Optional<SVal> V = B.getDefaultBinding(R);
1549 if (!V)
1550 return None;
1551
1552 Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>();
1553 if (!LCV)
1554 return None;
1555
1556 // If the LCV is for a subregion, the types might not match, and we shouldn't
1557 // reuse the binding.
1558 QualType RegionTy = getUnderlyingType(R);
1559 if (!RegionTy.isNull() &&
1560 !RegionTy->isVoidPointerType()) {
1561 QualType SourceRegionTy = LCV->getRegion()->getValueType();
1562 if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy))
1563 return None;
1564 }
1565
1566 if (!AllowSubregionBindings) {
1567 // If there are any other bindings within this region, we shouldn't reuse
1568 // the top-level binding.
1569 SmallVector<BindingPair, 16> Bindings;
1570 collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R,
1571 /*IncludeAllDefaultBindings=*/true);
1572 if (Bindings.size() > 1)
1573 return None;
1574 }
1575
1576 return *LCV;
1577 }
1578
1579
1580 std::pair<Store, const SubRegion *>
1581 RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
1582 const SubRegion *R,
1583 const SubRegion *originalRegion) {
1584 if (originalRegion != R) {
1585 if (Optional<nonloc::LazyCompoundVal> V =
1586 getExistingLazyBinding(svalBuilder, B, R, true))
1587 return std::make_pair(V->getStore(), V->getRegion());
1588 }
1589
1590 typedef std::pair<Store, const SubRegion *> StoreRegionPair;
1591 StoreRegionPair Result = StoreRegionPair();
1592
1593 if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
1594 Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()),
1595 originalRegion);
1596
1597 if (Result.second)
1598 Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second);
1599
1600 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
1601 Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()),
1602 originalRegion);
1603
1604 if (Result.second)
1605 Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second);
1606
1607 } else if (const CXXBaseObjectRegion *BaseReg =
1608 dyn_cast<CXXBaseObjectRegion>(R)) {
1609 // A C++ base object region is another kind of region that we should look
1610 // through when searching for a lazy compound value; it behaves like a field region.
1611 Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()),
1612 originalRegion);
1613
1614 if (Result.second)
1615 Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg,
1616 Result.second);
1617 }
1618
1619 return Result;
1620 }
1621
1622 SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
1623 const ElementRegion* R) {
1624 // We do not currently model bindings of the CompoundLiteralRegion.
1625 if (isa<CompoundLiteralRegion>(R->getBaseRegion()))
1626 return UnknownVal();
1627
1628 // Check if the region has a binding.
1629 if (const Optional<SVal> &V = B.getDirectBinding(R))
1630 return *V;
1631
1632 const MemRegion* superR = R->getSuperRegion();
1633
1634 // Check if the region is an element region of a string literal.
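// For example (hypothetical client code):
//
//   const char *s = "hello";
//   char c = s[1];  // 's[1]' is an ElementRegion whose super-region is the
//                   // StringRegion for "hello"; the lookup below folds it to 'e'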
1635 if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) { 1636 // FIXME: Handle loads from strings where the literal is treated as 1637 // an integer, e.g., *((unsigned int*)"hello") 1638 QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType(); 1639 if (!Ctx.hasSameUnqualifiedType(T, R->getElementType())) 1640 return UnknownVal(); 1641 1642 const StringLiteral *Str = StrR->getStringLiteral(); 1643 SVal Idx = R->getIndex(); 1644 if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) { 1645 int64_t i = CI->getValue().getSExtValue(); 1646 // Abort on string underrun. This can be possible by arbitrary 1647 // clients of getBindingForElement(). 1648 if (i < 0) 1649 return UndefinedVal(); 1650 int64_t length = Str->getLength(); 1651 // Technically, only i == length is guaranteed to be null. 1652 // However, such overflows should be caught before reaching this point; 1653 // the only time such an access would be made is if a string literal was 1654 // used to initialize a larger array. 1655 char c = (i >= length) ? '\0' : Str->getCodeUnit(i); 1656 return svalBuilder.makeIntVal(c, T); 1657 } 1658 } else if (const VarRegion *VR = dyn_cast<VarRegion>(superR)) { 1659 // Check if the containing array is const and has an initialized value. 1660 const VarDecl *VD = VR->getDecl(); 1661 // Either the array or the array element has to be const. 1662 if (VD->getType().isConstQualified() || R->getElementType().isConstQualified()) { 1663 if (const Expr *Init = VD->getInit()) { 1664 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1665 // The array index has to be known. 1666 if (auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) { 1667 int64_t i = CI->getValue().getSExtValue(); 1668 // If it is known that the index is out of bounds, we can return 1669 // an undefined value. 1670 if (i < 0) 1671 return UndefinedVal(); 1672 1673 if (auto CAT = Ctx.getAsConstantArrayType(VD->getType())) 1674 if (CAT->getSize().sle(i)) 1675 return UndefinedVal(); 1676 1677 // If there is a list, but no init, it must be zero. 1678 if (i >= InitList->getNumInits()) 1679 return svalBuilder.makeZeroVal(R->getElementType()); 1680 1681 if (const Expr *ElemInit = InitList->getInit(i)) 1682 if (Optional<SVal> V = svalBuilder.getConstantVal(ElemInit)) 1683 return *V; 1684 } 1685 } 1686 } 1687 } 1688 } 1689 1690 // Check for loads from a code text region. For such loads, just give up. 1691 if (isa<CodeTextRegion>(superR)) 1692 return UnknownVal(); 1693 1694 // Handle the case where we are indexing into a larger scalar object. 1695 // For example, this handles: 1696 // int x = ... 1697 // char *y = &x; 1698 // return *y; 1699 // FIXME: This is a hack, and doesn't do anything really intelligent yet. 1700 const RegionRawOffset &O = R->getAsArrayOffset(); 1701 1702 // If we cannot reason about the offset, return an unknown value. 1703 if (!O.getRegion()) 1704 return UnknownVal(); 1705 1706 if (const TypedValueRegion *baseR = 1707 dyn_cast_or_null<TypedValueRegion>(O.getRegion())) { 1708 QualType baseT = baseR->getValueType(); 1709 if (baseT->isScalarType()) { 1710 QualType elemT = R->getElementType(); 1711 if (elemT->isScalarType()) { 1712 if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) { 1713 if (const Optional<SVal> &V = B.getDirectBinding(superR)) { 1714 if (SymbolRef parentSym = V->getAsSymbol()) 1715 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1716 1717 if (V->isUnknownOrUndef()) 1718 return *V; 1719 // Other cases: give up. 
We are indexing into a larger object 1720 // that has some value, but we don't know how to handle that yet. 1721 return UnknownVal(); 1722 } 1723 } 1724 } 1725 } 1726 } 1727 return getBindingForFieldOrElementCommon(B, R, R->getElementType()); 1728 } 1729 1730 SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B, 1731 const FieldRegion* R) { 1732 1733 // Check if the region has a binding. 1734 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1735 return *V; 1736 1737 // Is the field declared constant and has an in-class initializer? 1738 const FieldDecl *FD = R->getDecl(); 1739 QualType Ty = FD->getType(); 1740 if (Ty.isConstQualified()) 1741 if (const Expr *Init = FD->getInClassInitializer()) 1742 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1743 return *V; 1744 1745 // If the containing record was initialized, try to get its constant value. 1746 const MemRegion* superR = R->getSuperRegion(); 1747 if (const auto *VR = dyn_cast<VarRegion>(superR)) { 1748 const VarDecl *VD = VR->getDecl(); 1749 QualType RecordVarTy = VD->getType(); 1750 unsigned Index = FD->getFieldIndex(); 1751 // Either the record variable or the field has to be const qualified. 1752 if (RecordVarTy.isConstQualified() || Ty.isConstQualified()) 1753 if (const Expr *Init = VD->getInit()) 1754 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1755 if (Index < InitList->getNumInits()) { 1756 if (const Expr *FieldInit = InitList->getInit(Index)) 1757 if (Optional<SVal> V = svalBuilder.getConstantVal(FieldInit)) 1758 return *V; 1759 } else { 1760 return svalBuilder.makeZeroVal(Ty); 1761 } 1762 } 1763 } 1764 1765 return getBindingForFieldOrElementCommon(B, R, Ty); 1766 } 1767 1768 Optional<SVal> 1769 RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 1770 const MemRegion *superR, 1771 const TypedValueRegion *R, 1772 QualType Ty) { 1773 1774 if (const Optional<SVal> &D = B.getDefaultBinding(superR)) { 1775 const SVal &val = D.getValue(); 1776 if (SymbolRef parentSym = val.getAsSymbol()) 1777 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1778 1779 if (val.isZeroConstant()) 1780 return svalBuilder.makeZeroVal(Ty); 1781 1782 if (val.isUnknownOrUndef()) 1783 return val; 1784 1785 // Lazy bindings are usually handled through getExistingLazyBinding(). 1786 // We should unify these two code paths at some point. 1787 if (val.getAs<nonloc::LazyCompoundVal>() || 1788 val.getAs<nonloc::CompoundVal>()) 1789 return val; 1790 1791 llvm_unreachable("Unknown default value"); 1792 } 1793 1794 return None; 1795 } 1796 1797 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 1798 RegionBindingsRef LazyBinding) { 1799 SVal Result; 1800 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 1801 Result = getBindingForElement(LazyBinding, ER); 1802 else 1803 Result = getBindingForField(LazyBinding, 1804 cast<FieldRegion>(LazyBindingRegion)); 1805 1806 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1807 // default value for /part/ of an aggregate from a default value for the 1808 // /entire/ aggregate. The most common case of this is when struct Outer 1809 // has as its first member a struct Inner, which is copied in from a stack 1810 // variable. In this case, even if the Outer's default value is symbolic, 0, 1811 // or unknown, it gets overridden by the Inner's default value of undefined. 
1812 // 1813 // This is a general problem -- if the Inner is zero-initialized, the Outer 1814 // will now look zero-initialized. The proper way to solve this is with a 1815 // new version of RegionStore that tracks the extent of a binding as well 1816 // as the offset. 1817 // 1818 // This hack only takes care of the undefined case because that can very 1819 // quickly result in a warning. 1820 if (Result.isUndef()) 1821 Result = UnknownVal(); 1822 1823 return Result; 1824 } 1825 1826 SVal 1827 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 1828 const TypedValueRegion *R, 1829 QualType Ty) { 1830 1831 // At this point we have already checked in either getBindingForElement or 1832 // getBindingForField if 'R' has a direct binding. 1833 1834 // Lazy binding? 1835 Store lazyBindingStore = nullptr; 1836 const SubRegion *lazyBindingRegion = nullptr; 1837 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 1838 if (lazyBindingRegion) 1839 return getLazyBinding(lazyBindingRegion, 1840 getRegionBindings(lazyBindingStore)); 1841 1842 // Record whether or not we see a symbolic index. That can completely 1843 // be out of scope of our lookup. 1844 bool hasSymbolicIndex = false; 1845 1846 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1847 // default value for /part/ of an aggregate from a default value for the 1848 // /entire/ aggregate. The most common case of this is when struct Outer 1849 // has as its first member a struct Inner, which is copied in from a stack 1850 // variable. In this case, even if the Outer's default value is symbolic, 0, 1851 // or unknown, it gets overridden by the Inner's default value of undefined. 1852 // 1853 // This is a general problem -- if the Inner is zero-initialized, the Outer 1854 // will now look zero-initialized. The proper way to solve this is with a 1855 // new version of RegionStore that tracks the extent of a binding as well 1856 // as the offset. 1857 // 1858 // This hack only takes care of the undefined case because that can very 1859 // quickly result in a warning. 1860 bool hasPartialLazyBinding = false; 1861 1862 const SubRegion *SR = R; 1863 while (SR) { 1864 const MemRegion *Base = SR->getSuperRegion(); 1865 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 1866 if (D->getAs<nonloc::LazyCompoundVal>()) { 1867 hasPartialLazyBinding = true; 1868 break; 1869 } 1870 1871 return *D; 1872 } 1873 1874 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 1875 NonLoc index = ER->getIndex(); 1876 if (!index.isConstant()) 1877 hasSymbolicIndex = true; 1878 } 1879 1880 // If our super region is a field or element itself, walk up the region 1881 // hierarchy to see if there is a default value installed in an ancestor. 1882 SR = dyn_cast<SubRegion>(Base); 1883 } 1884 1885 if (R->hasStackNonParametersStorage()) { 1886 if (isa<ElementRegion>(R)) { 1887 // Currently we don't reason specially about Clang-style vectors. Check 1888 // if superR is a vector and if so return Unknown. 1889 if (const TypedValueRegion *typedSuperR = 1890 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 1891 if (typedSuperR->getValueType()->isVectorType()) 1892 return UnknownVal(); 1893 } 1894 } 1895 1896 // FIXME: We also need to take ElementRegions with symbolic indexes into 1897 // account. This case handles both directly accessing an ElementRegion 1898 // with a symbolic offset, but also fields within an element with 1899 // a symbolic offset. 
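// For example (hypothetical):
//
//   int buf[8];
//   int x = buf[n];  // 'n' is unconstrained, so the index is symbolic
//
// A symbolic index may refer to any element, including ones that have been
// written, so we conservatively return Unknown rather than Undefined below.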
1900 if (hasSymbolicIndex) 1901 return UnknownVal(); 1902 1903 if (!hasPartialLazyBinding) 1904 return UndefinedVal(); 1905 } 1906 1907 // All other values are symbolic. 1908 return svalBuilder.getRegionValueSymbolVal(R); 1909 } 1910 1911 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 1912 const ObjCIvarRegion* R) { 1913 // Check if the region has a binding. 1914 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1915 return *V; 1916 1917 const MemRegion *superR = R->getSuperRegion(); 1918 1919 // Check if the super region has a default binding. 1920 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 1921 if (SymbolRef parentSym = V->getAsSymbol()) 1922 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1923 1924 // Other cases: give up. 1925 return UnknownVal(); 1926 } 1927 1928 return getBindingForLazySymbol(R); 1929 } 1930 1931 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 1932 const VarRegion *R) { 1933 1934 // Check if the region has a binding. 1935 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1936 return *V; 1937 1938 // Lazily derive a value for the VarRegion. 1939 const VarDecl *VD = R->getDecl(); 1940 const MemSpaceRegion *MS = R->getMemorySpace(); 1941 1942 // Arguments are always symbolic. 1943 if (isa<StackArgumentsSpaceRegion>(MS)) 1944 return svalBuilder.getRegionValueSymbolVal(R); 1945 1946 // Is 'VD' declared constant? If so, retrieve the constant value. 1947 if (VD->getType().isConstQualified()) { 1948 if (const Expr *Init = VD->getInit()) { 1949 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1950 return *V; 1951 1952 // If the variable is const qualified and has an initializer but 1953 // we couldn't evaluate initializer to a value, treat the value as 1954 // unknown. 1955 return UnknownVal(); 1956 } 1957 } 1958 1959 // This must come after the check for constants because closure-captured 1960 // constant variables may appear in UnknownSpaceRegion. 1961 if (isa<UnknownSpaceRegion>(MS)) 1962 return svalBuilder.getRegionValueSymbolVal(R); 1963 1964 if (isa<GlobalsSpaceRegion>(MS)) { 1965 QualType T = VD->getType(); 1966 1967 // Function-scoped static variables are default-initialized to 0; if they 1968 // have an initializer, it would have been processed by now. 1969 // FIXME: This is only true when we're starting analysis from main(). 1970 // We're losing a lot of coverage here. 1971 if (isa<StaticGlobalSpaceRegion>(MS)) 1972 return svalBuilder.makeZeroVal(T); 1973 1974 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 1975 assert(!V->getAs<nonloc::LazyCompoundVal>()); 1976 return V.getValue(); 1977 } 1978 1979 return svalBuilder.getRegionValueSymbolVal(R); 1980 } 1981 1982 return UndefinedVal(); 1983 } 1984 1985 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 1986 // All other values are symbolic. 1987 return svalBuilder.getRegionValueSymbolVal(R); 1988 } 1989 1990 const RegionStoreManager::SValListTy & 1991 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 1992 // First, check the cache. 1993 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 1994 if (I != LazyBindingsMap.end()) 1995 return I->second; 1996 1997 // If we don't have a list of values cached, start constructing it. 
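// Here, "interesting" means values that can keep other symbols or regions
// alive: constants, Unknown, and Undefined are filtered out below, and any
// nested LazyCompoundVals are flattened recursively.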
1998 SValListTy List; 1999 2000 const SubRegion *LazyR = LCV.getRegion(); 2001 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 2002 2003 // If this region had /no/ bindings at the time, there are no interesting 2004 // values to return. 2005 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 2006 if (!Cluster) 2007 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2008 2009 SmallVector<BindingPair, 32> Bindings; 2010 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 2011 /*IncludeAllDefaultBindings=*/true); 2012 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 2013 E = Bindings.end(); 2014 I != E; ++I) { 2015 SVal V = I->second; 2016 if (V.isUnknownOrUndef() || V.isConstant()) 2017 continue; 2018 2019 if (Optional<nonloc::LazyCompoundVal> InnerLCV = 2020 V.getAs<nonloc::LazyCompoundVal>()) { 2021 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 2022 List.insert(List.end(), InnerList.begin(), InnerList.end()); 2023 continue; 2024 } 2025 2026 List.push_back(V); 2027 } 2028 2029 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2030 } 2031 2032 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 2033 const TypedValueRegion *R) { 2034 if (Optional<nonloc::LazyCompoundVal> V = 2035 getExistingLazyBinding(svalBuilder, B, R, false)) 2036 return *V; 2037 2038 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 2039 } 2040 2041 static bool isRecordEmpty(const RecordDecl *RD) { 2042 if (!RD->field_empty()) 2043 return false; 2044 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 2045 return CRD->getNumBases() == 0; 2046 return true; 2047 } 2048 2049 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 2050 const TypedValueRegion *R) { 2051 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 2052 if (!RD->getDefinition() || isRecordEmpty(RD)) 2053 return UnknownVal(); 2054 2055 return createLazyBinding(B, R); 2056 } 2057 2058 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 2059 const TypedValueRegion *R) { 2060 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 2061 "Only constant array types can have compound bindings."); 2062 2063 return createLazyBinding(B, R); 2064 } 2065 2066 bool RegionStoreManager::includedInBindings(Store store, 2067 const MemRegion *region) const { 2068 RegionBindingsRef B = getRegionBindings(store); 2069 region = region->getBaseRegion(); 2070 2071 // Quick path: if the base is the head of a cluster, the region is live. 2072 if (B.lookup(region)) 2073 return true; 2074 2075 // Slow path: if the region is the VALUE of any binding, it is live. 2076 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 2077 const ClusterBindings &Cluster = RI.getData(); 2078 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2079 CI != CE; ++CI) { 2080 const SVal &D = CI.getData(); 2081 if (const MemRegion *R = D.getAsRegion()) 2082 if (R->getBaseRegion() == region) 2083 return true; 2084 } 2085 } 2086 2087 return false; 2088 } 2089 2090 //===----------------------------------------------------------------------===// 2091 // Binding values to regions. 
2092 //===----------------------------------------------------------------------===// 2093 2094 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) { 2095 if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>()) 2096 if (const MemRegion* R = LV->getRegion()) 2097 return StoreRef(getRegionBindings(ST).removeBinding(R) 2098 .asImmutableMap() 2099 .getRootWithoutRetain(), 2100 *this); 2101 2102 return StoreRef(ST, *this); 2103 } 2104 2105 RegionBindingsRef 2106 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) { 2107 if (L.getAs<loc::ConcreteInt>()) 2108 return B; 2109 2110 // If we get here, the location should be a region. 2111 const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion(); 2112 2113 // Check if the region is a struct region. 2114 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) { 2115 QualType Ty = TR->getValueType(); 2116 if (Ty->isArrayType()) 2117 return bindArray(B, TR, V); 2118 if (Ty->isStructureOrClassType()) 2119 return bindStruct(B, TR, V); 2120 if (Ty->isVectorType()) 2121 return bindVector(B, TR, V); 2122 if (Ty->isUnionType()) 2123 return bindAggregate(B, TR, V); 2124 } 2125 2126 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) { 2127 // Binding directly to a symbolic region should be treated as binding 2128 // to element 0. 2129 QualType T = SR->getSymbol()->getType(); 2130 if (T->isAnyPointerType() || T->isReferenceType()) 2131 T = T->getPointeeType(); 2132 2133 R = GetElementZeroRegion(SR, T); 2134 } 2135 2136 assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) && 2137 "'this' pointer is not an l-value and is not assignable"); 2138 2139 // Clear out bindings that may overlap with this binding. 2140 RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R)); 2141 return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V); 2142 } 2143 2144 RegionBindingsRef 2145 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B, 2146 const MemRegion *R, 2147 QualType T) { 2148 SVal V; 2149 2150 if (Loc::isLocType(T)) 2151 V = svalBuilder.makeNull(); 2152 else if (T->isIntegralOrEnumerationType()) 2153 V = svalBuilder.makeZeroVal(T); 2154 else if (T->isStructureOrClassType() || T->isArrayType()) { 2155 // Set the default value to a zero constant when it is a structure 2156 // or array. The type doesn't really matter. 2157 V = svalBuilder.makeZeroVal(Ctx.IntTy); 2158 } 2159 else { 2160 // We can't represent values of this type, but we still need to set a value 2161 // to record that the region has been initialized. 2162 // If this assertion ever fires, a new case should be added above -- we 2163 // should know how to default-initialize any value we can symbolicate. 2164 assert(!SymbolManager::canSymbolicate(T) && "This type is representable"); 2165 V = UnknownVal(); 2166 } 2167 2168 return B.addBinding(R, BindingKey::Default, V); 2169 } 2170 2171 RegionBindingsRef 2172 RegionStoreManager::bindArray(RegionBindingsConstRef B, 2173 const TypedValueRegion* R, 2174 SVal Init) { 2175 2176 const ArrayType *AT =cast<ArrayType>(Ctx.getCanonicalType(R->getValueType())); 2177 QualType ElementTy = AT->getElementType(); 2178 Optional<uint64_t> Size; 2179 2180 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT)) 2181 Size = CAT->getSize().getZExtValue(); 2182 2183 // Check if the init expr is a literal. If so, bind the rvalue instead. 2184 // FIXME: It's not responsibility of the Store to transform this lvalue 2185 // to rvalue. 
ExprEngine or maybe even CFG should do this before binding.
2186 if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
2187 SVal V = getBinding(B.asStore(), *MRV, R->getValueType());
2188 return bindAggregate(B, R, V);
2189 }
2190
2191 // Handle lazy compound values.
2192 if (Init.getAs<nonloc::LazyCompoundVal>())
2193 return bindAggregate(B, R, Init);
2194
2195 if (Init.isUnknown())
2196 return bindAggregate(B, R, UnknownVal());
2197
2198 // Remaining case: explicit compound values.
2199 const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2200 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2201 uint64_t i = 0;
2202
2203 RegionBindingsRef NewB(B);
2204
2205 for (; Size.hasValue() ? i < Size.getValue() : true ; ++i, ++VI) {
2206 // The init list might be shorter than the array length.
2207 if (VI == VE)
2208 break;
2209
2210 const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
2211 const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);
2212
2213 if (ElementTy->isStructureOrClassType())
2214 NewB = bindStruct(NewB, ER, *VI);
2215 else if (ElementTy->isArrayType())
2216 NewB = bindArray(NewB, ER, *VI);
2217 else
2218 NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2219 }
2220
2221 // If the init list is shorter than the array length (or the array has
2222 // variable length), set the array default value. Values that are already set
2223 // are not overwritten.
2224 if (!Size.hasValue() || i < Size.getValue())
2225 NewB = setImplicitDefaultValue(NewB, R, ElementTy);
2226
2227 return NewB;
2228 }
2229
2230 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
2231 const TypedValueRegion* R,
2232 SVal V) {
2233 QualType T = R->getValueType();
2234 assert(T->isVectorType());
2235 const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.
2236
2237 // Handle lazy compound values and symbolic values.
2238 if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
2239 return bindAggregate(B, R, V);
2240
2241 // We may accidentally get a non-CompoundVal here, due to imprecise cast logic
2242 // or because we are binding a symbolic aggregate value. Kill the old element
2243 // bindings, and if the value is symbolic, bind it as a "default" binding.
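// For illustration (hypothetical), the value may fail to be a CompoundVal when
// the initializer itself could only be evaluated conservatively, e.g.:
//
//   typedef int v4i __attribute__((vector_size(16)));
//   v4i v = *(v4i *)0x1000;  // a load from a concrete address yields Unknown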
2244 if (!V.getAs<nonloc::CompoundVal>()) {
2245 return bindAggregate(B, R, UnknownVal());
2246 }
2247
2248 QualType ElemType = VT->getElementType();
2249 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>();
2250 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2251 unsigned index = 0, numElements = VT->getNumElements();
2252 RegionBindingsRef NewB(B);
2253
2254 for ( ; index != numElements ; ++index) {
2255 if (VI == VE)
2256 break;
2257
2258 NonLoc Idx = svalBuilder.makeArrayIndex(index);
2259 const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx);
2260
2261 if (ElemType->isArrayType())
2262 NewB = bindArray(NewB, ER, *VI);
2263 else if (ElemType->isStructureOrClassType())
2264 NewB = bindStruct(NewB, ER, *VI);
2265 else
2266 NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2267 }
2268 return NewB;
2269 }
2270
2271 Optional<RegionBindingsRef>
2272 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B,
2273 const TypedValueRegion *R,
2274 const RecordDecl *RD,
2275 nonloc::LazyCompoundVal LCV) {
2276 FieldVector Fields;
2277
2278 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD))
2279 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0)
2280 return None;
2281
2282 for (const auto *FD : RD->fields()) {
2283 if (FD->isUnnamedBitfield())
2284 continue;
2285
2286 // If there are too many fields, or if any of the fields are aggregates,
2287 // just use the LCV as a default binding.
2288 if (Fields.size() == SmallStructLimit)
2289 return None;
2290
2291 QualType Ty = FD->getType();
2292 if (!(Ty->isScalarType() || Ty->isReferenceType()))
2293 return None;
2294
2295 Fields.push_back(FD);
2296 }
2297
2298 RegionBindingsRef NewB = B;
2299
2300 for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){
2301 const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion());
2302 SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR);
2303
2304 const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R);
2305 NewB = bind(NewB, loc::MemRegionVal(DestFR), V);
2306 }
2307
2308 return NewB;
2309 }
2310
2311 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B,
2312 const TypedValueRegion* R,
2313 SVal V) {
2314 if (!Features.supportsFields())
2315 return B;
2316
2317 QualType T = R->getValueType();
2318 assert(T->isStructureOrClassType());
2319
2320 const RecordType* RT = T->getAs<RecordType>();
2321 const RecordDecl *RD = RT->getDecl();
2322
2323 if (!RD->isCompleteDefinition())
2324 return B;
2325
2326 // Handle lazy compound values and symbolic values.
2327 if (Optional<nonloc::LazyCompoundVal> LCV =
2328 V.getAs<nonloc::LazyCompoundVal>()) {
2329 if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV))
2330 return *NewB;
2331 return bindAggregate(B, R, V);
2332 }
2333 if (V.getAs<nonloc::SymbolVal>())
2334 return bindAggregate(B, R, V);
2335
2336 // We may accidentally get a non-CompoundVal here, due to imprecise cast logic
2337 // or because we are binding a symbolic aggregate value. Kill the old field
2338 // values, and if the value is symbolic, bind it as a "default" binding.
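// (A symbolic aggregate, such as the value conjured for the result of a call
// the analyzer could not inline, was already routed to bindAggregate() above;
// the check below catches Unknown and any remaining non-compound values.)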
2339 if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
2340 return bindAggregate(B, R, UnknownVal());
2341
2342 const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2343 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2344
2345 RecordDecl::field_iterator FI, FE;
2346 RegionBindingsRef NewB(B);
2347
2348 for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2349
2350 if (VI == VE)
2351 break;
2352
2353 // Skip any unnamed bitfields to stay in sync with the initializers.
2354 if (FI->isUnnamedBitfield())
2355 continue;
2356
2357 QualType FTy = FI->getType();
2358 const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);
2359
2360 if (FTy->isArrayType())
2361 NewB = bindArray(NewB, FR, *VI);
2362 else if (FTy->isStructureOrClassType())
2363 NewB = bindStruct(NewB, FR, *VI);
2364 else
2365 NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
2366 ++VI;
2367 }
2368
2369 // There may be fewer values in the initializer list than there are fields in the struct.
2370 if (FI != FE) {
2371 NewB = NewB.addBinding(R, BindingKey::Default,
2372 svalBuilder.makeIntVal(0, false));
2373 }
2374
2375 return NewB;
2376 }
2377
2378 RegionBindingsRef
2379 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2380 const TypedRegion *R,
2381 SVal Val) {
2382 // Remove the old bindings, using 'R' as the root of all regions
2383 // we will invalidate. Then add the new binding.
2384 return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2385 }
2386
2387 //===----------------------------------------------------------------------===//
2388 // State pruning.
2389 //===----------------------------------------------------------------------===//
2390
2391 namespace {
2392 class RemoveDeadBindingsWorker
2393 : public ClusterAnalysis<RemoveDeadBindingsWorker> {
2394 using ChildrenListTy = SmallVector<const SymbolDerived *, 4>;
2395 using MapParentsToDerivedTy = llvm::DenseMap<SymbolRef, ChildrenListTy>;
2396
2397 MapParentsToDerivedTy ParentsToDerived;
2398 SymbolReaper &SymReaper;
2399 const StackFrameContext *CurrentLCtx;
2400
2401 public:
2402 RemoveDeadBindingsWorker(RegionStoreManager &rm,
2403 ProgramStateManager &stateMgr,
2404 RegionBindingsRef b, SymbolReaper &symReaper,
2405 const StackFrameContext *LCtx)
2406 : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b),
2407 SymReaper(symReaper), CurrentLCtx(LCtx) {}
2408
2409 // Called by ClusterAnalysis.
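// VisitAddedToCluster() decides whether a cluster's base region is a root of
// liveness (a live variable, a region for a live symbol, a non-static global,
// or 'this' in an active stack frame); VisitCluster() then walks a visited
// cluster's bindings and adds everything they reference to the worklist.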
2410 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2411 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2412 using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster; 2413 2414 using ClusterAnalysis::AddToWorkList; 2415 2416 bool AddToWorkList(const MemRegion *R); 2417 2418 void VisitBinding(SVal V); 2419 2420 private: 2421 void populateWorklistFromSymbol(SymbolRef s); 2422 }; 2423 } 2424 2425 bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2426 const MemRegion *BaseR = R->getBaseRegion(); 2427 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2428 } 2429 2430 void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2431 const ClusterBindings &C) { 2432 2433 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2434 if (SymReaper.isLive(VR)) 2435 AddToWorkList(baseR, &C); 2436 2437 return; 2438 } 2439 2440 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2441 if (SymReaper.isLive(SR->getSymbol())) { 2442 AddToWorkList(SR, &C); 2443 } else if (const auto *SD = dyn_cast<SymbolDerived>(SR->getSymbol())) { 2444 ParentsToDerived[SD->getParentSymbol()].push_back(SD); 2445 } 2446 2447 return; 2448 } 2449 2450 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2451 AddToWorkList(baseR, &C); 2452 return; 2453 } 2454 2455 // CXXThisRegion in the current or parent location context is live. 2456 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2457 const auto *StackReg = 2458 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2459 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2460 if (CurrentLCtx && 2461 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2462 AddToWorkList(TR, &C); 2463 } 2464 } 2465 2466 void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2467 const ClusterBindings *C) { 2468 if (!C) 2469 return; 2470 2471 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2472 // This means we should continue to track that symbol. 2473 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2474 SymReaper.markLive(SymR->getSymbol()); 2475 2476 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2477 // Element index of a binding key is live. 2478 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2479 2480 VisitBinding(I.getData()); 2481 } 2482 } 2483 2484 void RemoveDeadBindingsWorker::VisitBinding(SVal V) { 2485 // Is it a LazyCompoundVal? All referenced regions are live as well. 2486 if (Optional<nonloc::LazyCompoundVal> LCS = 2487 V.getAs<nonloc::LazyCompoundVal>()) { 2488 2489 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 2490 2491 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 2492 E = Vals.end(); 2493 I != E; ++I) 2494 VisitBinding(*I); 2495 2496 return; 2497 } 2498 2499 // If V is a region, then add it to the worklist. 2500 if (const MemRegion *R = V.getAsRegion()) { 2501 AddToWorkList(R); 2502 2503 if (const auto *TVR = dyn_cast<TypedValueRegion>(R)) { 2504 DefinedOrUnknownSVal RVS = 2505 RM.getSValBuilder().getRegionValueSymbolVal(TVR); 2506 if (const MemRegion *SR = RVS.getAsRegion()) { 2507 AddToWorkList(SR); 2508 } 2509 } 2510 2511 SymReaper.markLive(R); 2512 2513 // All regions captured by a block are also live. 
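// For example (hypothetical blocks code):
//
//   __block int x = 0;
//   void (^cb)(void) = ^{ ++x; };  // while 'cb' is live, 'x' must stay live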
2514 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2515 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2516 E = BR->referenced_vars_end(); 2517 for ( ; I != E; ++I) 2518 AddToWorkList(I.getCapturedRegion()); 2519 } 2520 } 2521 2522 2523 // Update the set of live symbols. 2524 for (auto SI = V.symbol_begin(), SE = V.symbol_end(); SI != SE; ++SI) { 2525 populateWorklistFromSymbol(*SI); 2526 2527 for (const auto *SD : ParentsToDerived[*SI]) 2528 populateWorklistFromSymbol(SD); 2529 2530 SymReaper.markLive(*SI); 2531 } 2532 } 2533 2534 void RemoveDeadBindingsWorker::populateWorklistFromSymbol(SymbolRef S) { 2535 if (const auto *SD = dyn_cast<SymbolData>(S)) { 2536 if (Loc::isLocType(SD->getType()) && !SymReaper.isLive(SD)) { 2537 const SymbolicRegion *SR = RM.getRegionManager().getSymbolicRegion(SD); 2538 2539 if (B.contains(SR)) 2540 AddToWorkList(SR); 2541 2542 const SymbolicRegion *SHR = 2543 RM.getRegionManager().getSymbolicHeapRegion(SD); 2544 if (B.contains(SHR)) 2545 AddToWorkList(SHR); 2546 } 2547 } 2548 } 2549 2550 StoreRef RegionStoreManager::removeDeadBindings(Store store, 2551 const StackFrameContext *LCtx, 2552 SymbolReaper& SymReaper) { 2553 RegionBindingsRef B = getRegionBindings(store); 2554 RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx); 2555 W.GenerateClusters(); 2556 2557 // Enqueue the region roots onto the worklist. 2558 for (SymbolReaper::region_iterator I = SymReaper.region_begin(), 2559 E = SymReaper.region_end(); I != E; ++I) { 2560 W.AddToWorkList(*I); 2561 } 2562 2563 W.RunWorkList(); 2564 2565 // We have now scanned the store, marking reachable regions and symbols 2566 // as live. We now remove all the regions that are dead from the store 2567 // as well as update DSymbols with the set symbols that are now dead. 2568 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 2569 const MemRegion *Base = I.getKey(); 2570 2571 // If the cluster has been visited, we know the region has been marked. 2572 // Otherwise, remove the dead entry. 2573 if (!W.isVisited(Base)) 2574 B = B.remove(Base); 2575 } 2576 2577 return StoreRef(B.asStore(), *this); 2578 } 2579 2580 //===----------------------------------------------------------------------===// 2581 // Utility methods. 2582 //===----------------------------------------------------------------------===// 2583 2584 void RegionStoreManager::print(Store store, raw_ostream &OS, 2585 const char* nl) { 2586 RegionBindingsRef B = getRegionBindings(store); 2587 OS << "Store (direct and default bindings), " 2588 << B.asStore() 2589 << " :" << nl; 2590 B.dump(OS, nl); 2591 } 2592