1 //== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file defines a basic region store model. In this model, we do have field 11 // sensitivity. But we assume nothing about the heap shape. So recursive data 12 // structures are largely ignored. Basically we do 1-limiting analysis. 13 // Parameter pointers are assumed with no aliasing. Pointee objects of 14 // parameters are created lazily. 15 // 16 //===----------------------------------------------------------------------===// 17 18 #include "clang/AST/Attr.h" 19 #include "clang/AST/CharUnits.h" 20 #include "clang/ASTMatchers/ASTMatchFinder.h" 21 #include "clang/Analysis/Analyses/LiveVariables.h" 22 #include "clang/Analysis/AnalysisDeclContext.h" 23 #include "clang/Basic/TargetInfo.h" 24 #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 25 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" 26 #include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h" 27 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" 28 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h" 29 #include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h" 30 #include "llvm/ADT/ImmutableMap.h" 31 #include "llvm/ADT/Optional.h" 32 #include "llvm/Support/raw_ostream.h" 33 #include <utility> 34 35 using namespace clang; 36 using namespace ento; 37 38 //===----------------------------------------------------------------------===// 39 // Representation of binding keys. 40 //===----------------------------------------------------------------------===// 41 42 namespace { 43 class BindingKey { 44 public: 45 enum Kind { Default = 0x0, Direct = 0x1 }; 46 private: 47 enum { Symbolic = 0x2 }; 48 49 llvm::PointerIntPair<const MemRegion *, 2> P; 50 uint64_t Data; 51 52 /// Create a key for a binding to region \p r, which has a symbolic offset 53 /// from region \p Base. 54 explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k) 55 : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) { 56 assert(r && Base && "Must have known regions."); 57 assert(getConcreteOffsetRegion() == Base && "Failed to store base region"); 58 } 59 60 /// Create a key for a binding at \p offset from base region \p r. 
61 explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k) 62 : P(r, k), Data(offset) { 63 assert(r && "Must have known regions."); 64 assert(getOffset() == offset && "Failed to store offset"); 65 assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r) || 66 isa <CXXDerivedObjectRegion>(r)) && 67 "Not a base"); 68 } 69 public: 70 71 bool isDirect() const { return P.getInt() & Direct; } 72 bool hasSymbolicOffset() const { return P.getInt() & Symbolic; } 73 74 const MemRegion *getRegion() const { return P.getPointer(); } 75 uint64_t getOffset() const { 76 assert(!hasSymbolicOffset()); 77 return Data; 78 } 79 80 const SubRegion *getConcreteOffsetRegion() const { 81 assert(hasSymbolicOffset()); 82 return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data)); 83 } 84 85 const MemRegion *getBaseRegion() const { 86 if (hasSymbolicOffset()) 87 return getConcreteOffsetRegion()->getBaseRegion(); 88 return getRegion()->getBaseRegion(); 89 } 90 91 void Profile(llvm::FoldingSetNodeID& ID) const { 92 ID.AddPointer(P.getOpaqueValue()); 93 ID.AddInteger(Data); 94 } 95 96 static BindingKey Make(const MemRegion *R, Kind k); 97 98 bool operator<(const BindingKey &X) const { 99 if (P.getOpaqueValue() < X.P.getOpaqueValue()) 100 return true; 101 if (P.getOpaqueValue() > X.P.getOpaqueValue()) 102 return false; 103 return Data < X.Data; 104 } 105 106 bool operator==(const BindingKey &X) const { 107 return P.getOpaqueValue() == X.P.getOpaqueValue() && 108 Data == X.Data; 109 } 110 111 void dump() const; 112 }; 113 } // end anonymous namespace 114 115 BindingKey BindingKey::Make(const MemRegion *R, Kind k) { 116 const RegionOffset &RO = R->getAsOffset(); 117 if (RO.hasSymbolicOffset()) 118 return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k); 119 120 return BindingKey(RO.getRegion(), RO.getOffset(), k); 121 } 122 123 namespace llvm { 124 static inline 125 raw_ostream &operator<<(raw_ostream &os, BindingKey K) { 126 os << '(' << K.getRegion(); 127 if (!K.hasSymbolicOffset()) 128 os << ',' << K.getOffset(); 129 os << ',' << (K.isDirect() ? "direct" : "default") 130 << ')'; 131 return os; 132 } 133 134 template <typename T> struct isPodLike; 135 template <> struct isPodLike<BindingKey> { 136 static const bool value = true; 137 }; 138 } // end llvm namespace 139 140 #ifndef NDEBUG 141 LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; } 142 #endif 143 144 //===----------------------------------------------------------------------===// 145 // Actual Store type. 
146 //===----------------------------------------------------------------------===// 147 148 typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings; 149 typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef; 150 typedef std::pair<BindingKey, SVal> BindingPair; 151 152 typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings> 153 RegionBindings; 154 155 namespace { 156 class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *, 157 ClusterBindings> { 158 ClusterBindings::Factory *CBFactory; 159 160 public: 161 typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings> 162 ParentTy; 163 164 RegionBindingsRef(ClusterBindings::Factory &CBFactory, 165 const RegionBindings::TreeTy *T, 166 RegionBindings::TreeTy::Factory *F) 167 : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F), 168 CBFactory(&CBFactory) {} 169 170 RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory) 171 : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P), 172 CBFactory(&CBFactory) {} 173 174 RegionBindingsRef add(key_type_ref K, data_type_ref D) const { 175 return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D), 176 *CBFactory); 177 } 178 179 RegionBindingsRef remove(key_type_ref K) const { 180 return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K), 181 *CBFactory); 182 } 183 184 RegionBindingsRef addBinding(BindingKey K, SVal V) const; 185 186 RegionBindingsRef addBinding(const MemRegion *R, 187 BindingKey::Kind k, SVal V) const; 188 189 const SVal *lookup(BindingKey K) const; 190 const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const; 191 using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup; 192 193 RegionBindingsRef removeBinding(BindingKey K); 194 195 RegionBindingsRef removeBinding(const MemRegion *R, 196 BindingKey::Kind k); 197 198 RegionBindingsRef removeBinding(const MemRegion *R) { 199 return removeBinding(R, BindingKey::Direct). 200 removeBinding(R, BindingKey::Default); 201 } 202 203 Optional<SVal> getDirectBinding(const MemRegion *R) const; 204 205 /// getDefaultBinding - Returns an SVal* representing an optional default 206 /// binding associated with a region and its subregions. 207 Optional<SVal> getDefaultBinding(const MemRegion *R) const; 208 209 /// Return the internal tree as a Store. 210 Store asStore() const { 211 return asImmutableMap().getRootWithoutRetain(); 212 } 213 214 void dump(raw_ostream &OS, const char *nl) const { 215 for (iterator I = begin(), E = end(); I != E; ++I) { 216 const ClusterBindings &Cluster = I.getData(); 217 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 218 CI != CE; ++CI) { 219 OS << ' ' << CI.getKey() << " : " << CI.getData() << nl; 220 } 221 OS << nl; 222 } 223 } 224 225 LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); } 226 }; 227 } // end anonymous namespace 228 229 typedef const RegionBindingsRef& RegionBindingsConstRef; 230 231 Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const { 232 return Optional<SVal>::create(lookup(R, BindingKey::Direct)); 233 } 234 235 Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const { 236 return Optional<SVal>::create(lookup(R, BindingKey::Default)); 237 } 238 239 RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const { 240 const MemRegion *Base = K.getBaseRegion(); 241 242 const ClusterBindings *ExistingCluster = lookup(Base); 243 ClusterBindings Cluster = 244 (ExistingCluster ? 
*ExistingCluster : CBFactory->getEmptyMap()); 245 246 ClusterBindings NewCluster = CBFactory->add(Cluster, K, V); 247 return add(Base, NewCluster); 248 } 249 250 251 RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R, 252 BindingKey::Kind k, 253 SVal V) const { 254 return addBinding(BindingKey::Make(R, k), V); 255 } 256 257 const SVal *RegionBindingsRef::lookup(BindingKey K) const { 258 const ClusterBindings *Cluster = lookup(K.getBaseRegion()); 259 if (!Cluster) 260 return nullptr; 261 return Cluster->lookup(K); 262 } 263 264 const SVal *RegionBindingsRef::lookup(const MemRegion *R, 265 BindingKey::Kind k) const { 266 return lookup(BindingKey::Make(R, k)); 267 } 268 269 RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) { 270 const MemRegion *Base = K.getBaseRegion(); 271 const ClusterBindings *Cluster = lookup(Base); 272 if (!Cluster) 273 return *this; 274 275 ClusterBindings NewCluster = CBFactory->remove(*Cluster, K); 276 if (NewCluster.isEmpty()) 277 return remove(Base); 278 return add(Base, NewCluster); 279 } 280 281 RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R, 282 BindingKey::Kind k){ 283 return removeBinding(BindingKey::Make(R, k)); 284 } 285 286 //===----------------------------------------------------------------------===// 287 // Fine-grained control of RegionStoreManager. 288 //===----------------------------------------------------------------------===// 289 290 namespace { 291 struct minimal_features_tag {}; 292 struct maximal_features_tag {}; 293 294 class RegionStoreFeatures { 295 bool SupportsFields; 296 public: 297 RegionStoreFeatures(minimal_features_tag) : 298 SupportsFields(false) {} 299 300 RegionStoreFeatures(maximal_features_tag) : 301 SupportsFields(true) {} 302 303 void enableFields(bool t) { SupportsFields = t; } 304 305 bool supportsFields() const { return SupportsFields; } 306 }; 307 } 308 309 //===----------------------------------------------------------------------===// 310 // Main RegionStore logic. 311 //===----------------------------------------------------------------------===// 312 313 namespace { 314 class InvalidateRegionsWorker; 315 316 class RegionStoreManager : public StoreManager { 317 public: 318 const RegionStoreFeatures Features; 319 320 RegionBindings::Factory RBFactory; 321 mutable ClusterBindings::Factory CBFactory; 322 323 typedef std::vector<SVal> SValListTy; 324 private: 325 typedef llvm::DenseMap<const LazyCompoundValData *, 326 SValListTy> LazyBindingsMapTy; 327 LazyBindingsMapTy LazyBindingsMap; 328 329 /// The largest number of fields a struct can have and still be 330 /// considered "small". 331 /// 332 /// This is currently used to decide whether or not it is worth "forcing" a 333 /// LazyCompoundVal on bind. 334 /// 335 /// This is controlled by 'region-store-small-struct-limit' option. 336 /// To disable all small-struct-dependent behavior, set the option to "0". 337 unsigned SmallStructLimit; 338 339 /// A helper used to populate the work list with the given set of 340 /// regions. 
341 void populateWorkList(InvalidateRegionsWorker &W, 342 ArrayRef<SVal> Values, 343 InvalidatedRegions *TopLevelRegions); 344 345 public: 346 RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f) 347 : StoreManager(mgr), Features(f), 348 RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()), 349 SmallStructLimit(0) { 350 if (SubEngine *Eng = StateMgr.getOwningEngine()) { 351 AnalyzerOptions &Options = Eng->getAnalysisManager().options; 352 SmallStructLimit = 353 Options.getRegionStoreSmallStructLimit(); 354 } 355 } 356 357 358 /// setImplicitDefaultValue - Set the default binding for the provided 359 /// MemRegion to the value implicitly defined for compound literals when 360 /// the value is not specified. 361 RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B, 362 const MemRegion *R, QualType T); 363 364 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 365 /// type. 'Array' represents the lvalue of the array being decayed 366 /// to a pointer, and the returned SVal represents the decayed 367 /// version of that lvalue (i.e., a pointer to the first element of 368 /// the array). This is called by ExprEngine when evaluating 369 /// casts from arrays to pointers. 370 SVal ArrayToPointer(Loc Array, QualType ElementTy) override; 371 372 StoreRef getInitialStore(const LocationContext *InitLoc) override { 373 return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this); 374 } 375 376 //===-------------------------------------------------------------------===// 377 // Binding values to regions. 378 //===-------------------------------------------------------------------===// 379 RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K, 380 const Expr *Ex, 381 unsigned Count, 382 const LocationContext *LCtx, 383 RegionBindingsRef B, 384 InvalidatedRegions *Invalidated); 385 386 StoreRef invalidateRegions(Store store, 387 ArrayRef<SVal> Values, 388 const Expr *E, unsigned Count, 389 const LocationContext *LCtx, 390 const CallEvent *Call, 391 InvalidatedSymbols &IS, 392 RegionAndSymbolInvalidationTraits &ITraits, 393 InvalidatedRegions *Invalidated, 394 InvalidatedRegions *InvalidatedTopLevel) override; 395 396 bool scanReachableSymbols(Store S, const MemRegion *R, 397 ScanReachableSymbols &Callbacks) override; 398 399 RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B, 400 const SubRegion *R); 401 402 public: // Part of public interface to class. 403 404 StoreRef Bind(Store store, Loc LV, SVal V) override { 405 return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this); 406 } 407 408 RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V); 409 410 // BindDefaultInitial is only used to initialize a region with 411 // a default value. 412 StoreRef BindDefaultInitial(Store store, const MemRegion *R, 413 SVal V) override { 414 RegionBindingsRef B = getRegionBindings(store); 415 // Use other APIs when you have to wipe the region that was initialized 416 // earlier. 417 assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) && 418 "Double initialization!"); 419 B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V); 420 return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this); 421 } 422 423 // BindDefaultZero is used for zeroing constructors that may accidentally 424 // overwrite existing bindings. 
425 StoreRef BindDefaultZero(Store store, const MemRegion *R) override { 426 // FIXME: The offsets of empty bases can be tricky because of 427 // of the so called "empty base class optimization". 428 // If a base class has been optimized out 429 // we should not try to create a binding, otherwise we should. 430 // Unfortunately, at the moment ASTRecordLayout doesn't expose 431 // the actual sizes of the empty bases 432 // and trying to infer them from offsets/alignments 433 // seems to be error-prone and non-trivial because of the trailing padding. 434 // As a temporary mitigation we don't create bindings for empty bases. 435 if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R)) 436 if (BR->getDecl()->isEmpty()) 437 return StoreRef(store, *this); 438 439 RegionBindingsRef B = getRegionBindings(store); 440 SVal V = svalBuilder.makeZeroVal(Ctx.CharTy); 441 B = removeSubRegionBindings(B, cast<SubRegion>(R)); 442 B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V); 443 return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this); 444 } 445 446 /// Attempt to extract the fields of \p LCV and bind them to the struct region 447 /// \p R. 448 /// 449 /// This path is used when it seems advantageous to "force" loading the values 450 /// within a LazyCompoundVal to bind memberwise to the struct region, rather 451 /// than using a Default binding at the base of the entire region. This is a 452 /// heuristic attempting to avoid building long chains of LazyCompoundVals. 453 /// 454 /// \returns The updated store bindings, or \c None if binding non-lazily 455 /// would be too expensive. 456 Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B, 457 const TypedValueRegion *R, 458 const RecordDecl *RD, 459 nonloc::LazyCompoundVal LCV); 460 461 /// BindStruct - Bind a compound value to a structure. 462 RegionBindingsRef bindStruct(RegionBindingsConstRef B, 463 const TypedValueRegion* R, SVal V); 464 465 /// BindVector - Bind a compound value to a vector. 466 RegionBindingsRef bindVector(RegionBindingsConstRef B, 467 const TypedValueRegion* R, SVal V); 468 469 RegionBindingsRef bindArray(RegionBindingsConstRef B, 470 const TypedValueRegion* R, 471 SVal V); 472 473 /// Clears out all bindings in the given region and assigns a new value 474 /// as a Default binding. 475 RegionBindingsRef bindAggregate(RegionBindingsConstRef B, 476 const TypedRegion *R, 477 SVal DefaultVal); 478 479 /// Create a new store with the specified binding removed. 480 /// \param ST the original store, that is the basis for the new store. 481 /// \param L the location whose binding should be removed. 482 StoreRef killBinding(Store ST, Loc L) override; 483 484 void incrementReferenceCount(Store store) override { 485 getRegionBindings(store).manualRetain(); 486 } 487 488 /// If the StoreManager supports it, decrement the reference count of 489 /// the specified Store object. If the reference count hits 0, the memory 490 /// associated with the object is recycled. 491 void decrementReferenceCount(Store store) override { 492 getRegionBindings(store).manualRelease(); 493 } 494 495 bool includedInBindings(Store store, const MemRegion *region) const override; 496 497 /// Return the value bound to specified location in a given state. 
498 /// 499 /// The high level logic for this method is this: 500 /// getBinding (L) 501 /// if L has binding 502 /// return L's binding 503 /// else if L is in killset 504 /// return unknown 505 /// else 506 /// if L is on stack or heap 507 /// return undefined 508 /// else 509 /// return symbolic 510 SVal getBinding(Store S, Loc L, QualType T) override { 511 return getBinding(getRegionBindings(S), L, T); 512 } 513 514 Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override { 515 RegionBindingsRef B = getRegionBindings(S); 516 // Default bindings are always applied over a base region so look up the 517 // base region's default binding, otherwise the lookup will fail when R 518 // is at an offset from R->getBaseRegion(). 519 return B.getDefaultBinding(R->getBaseRegion()); 520 } 521 522 SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType()); 523 524 SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R); 525 526 SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R); 527 528 SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R); 529 530 SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R); 531 532 SVal getBindingForLazySymbol(const TypedValueRegion *R); 533 534 SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 535 const TypedValueRegion *R, 536 QualType Ty); 537 538 SVal getLazyBinding(const SubRegion *LazyBindingRegion, 539 RegionBindingsRef LazyBinding); 540 541 /// Get bindings for the values in a struct and return a CompoundVal, used 542 /// when doing struct copy: 543 /// struct s x, y; 544 /// x = y; 545 /// y's value is retrieved by this method. 546 SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R); 547 SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R); 548 NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R); 549 550 /// Used to lazily generate derived symbols for bindings that are defined 551 /// implicitly by default bindings in a super region. 552 /// 553 /// Note that callers may need to specially handle LazyCompoundVals, which 554 /// are returned as is in case the caller needs to treat them differently. 555 Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 556 const MemRegion *superR, 557 const TypedValueRegion *R, 558 QualType Ty); 559 560 /// Get the state and region whose binding this region \p R corresponds to. 561 /// 562 /// If there is no lazy binding for \p R, the returned value will have a null 563 /// \c second. Note that a null pointer can represents a valid Store. 564 std::pair<Store, const SubRegion *> 565 findLazyBinding(RegionBindingsConstRef B, const SubRegion *R, 566 const SubRegion *originalRegion); 567 568 /// Returns the cached set of interesting SVals contained within a lazy 569 /// binding. 570 /// 571 /// The precise value of "interesting" is determined for the purposes of 572 /// RegionStore's internal analysis. It must always contain all regions and 573 /// symbols, but may omit constants and other kinds of SVal. 574 const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV); 575 576 //===------------------------------------------------------------------===// 577 // State pruning. 578 //===------------------------------------------------------------------===// 579 580 /// removeDeadBindings - Scans the RegionStore of 'state' for dead values. 581 /// It returns a new Store with these values removed. 
582 StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx, 583 SymbolReaper& SymReaper) override; 584 585 //===------------------------------------------------------------------===// 586 // Region "extents". 587 //===------------------------------------------------------------------===// 588 589 // FIXME: This method will soon be eliminated; see the note in Store.h. 590 DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state, 591 const MemRegion* R, 592 QualType EleTy) override; 593 594 //===------------------------------------------------------------------===// 595 // Utility methods. 596 //===------------------------------------------------------------------===// 597 598 RegionBindingsRef getRegionBindings(Store store) const { 599 return RegionBindingsRef(CBFactory, 600 static_cast<const RegionBindings::TreeTy*>(store), 601 RBFactory.getTreeFactory()); 602 } 603 604 void print(Store store, raw_ostream &Out, const char* nl) override; 605 606 void iterBindings(Store store, BindingsHandler& f) override { 607 RegionBindingsRef B = getRegionBindings(store); 608 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 609 const ClusterBindings &Cluster = I.getData(); 610 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 611 CI != CE; ++CI) { 612 const BindingKey &K = CI.getKey(); 613 if (!K.isDirect()) 614 continue; 615 if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) { 616 // FIXME: Possibly incorporate the offset? 617 if (!f.HandleBinding(*this, store, R, CI.getData())) 618 return; 619 } 620 } 621 } 622 } 623 }; 624 625 } // end anonymous namespace 626 627 //===----------------------------------------------------------------------===// 628 // RegionStore creation. 629 //===----------------------------------------------------------------------===// 630 631 std::unique_ptr<StoreManager> 632 ento::CreateRegionStoreManager(ProgramStateManager &StMgr) { 633 RegionStoreFeatures F = maximal_features_tag(); 634 return llvm::make_unique<RegionStoreManager>(StMgr, F); 635 } 636 637 std::unique_ptr<StoreManager> 638 ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) { 639 RegionStoreFeatures F = minimal_features_tag(); 640 F.enableFields(true); 641 return llvm::make_unique<RegionStoreManager>(StMgr, F); 642 } 643 644 645 //===----------------------------------------------------------------------===// 646 // Region Cluster analysis. 647 //===----------------------------------------------------------------------===// 648 649 namespace { 650 /// Used to determine which global regions are automatically included in the 651 /// initial worklist of a ClusterAnalysis. 652 enum GlobalsFilterKind { 653 /// Don't include any global regions. 654 GFK_None, 655 /// Only include system globals. 656 GFK_SystemOnly, 657 /// Include all global regions. 
658 GFK_All 659 }; 660 661 template <typename DERIVED> 662 class ClusterAnalysis { 663 protected: 664 typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap; 665 typedef const MemRegion * WorkListElement; 666 typedef SmallVector<WorkListElement, 10> WorkList; 667 668 llvm::SmallPtrSet<const ClusterBindings *, 16> Visited; 669 670 WorkList WL; 671 672 RegionStoreManager &RM; 673 ASTContext &Ctx; 674 SValBuilder &svalBuilder; 675 676 RegionBindingsRef B; 677 678 679 protected: 680 const ClusterBindings *getCluster(const MemRegion *R) { 681 return B.lookup(R); 682 } 683 684 /// Returns true if all clusters in the given memspace should be initially 685 /// included in the cluster analysis. Subclasses may provide their 686 /// own implementation. 687 bool includeEntireMemorySpace(const MemRegion *Base) { 688 return false; 689 } 690 691 public: 692 ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr, 693 RegionBindingsRef b) 694 : RM(rm), Ctx(StateMgr.getContext()), 695 svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {} 696 697 RegionBindingsRef getRegionBindings() const { return B; } 698 699 bool isVisited(const MemRegion *R) { 700 return Visited.count(getCluster(R)); 701 } 702 703 void GenerateClusters() { 704 // Scan the entire set of bindings and record the region clusters. 705 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); 706 RI != RE; ++RI){ 707 const MemRegion *Base = RI.getKey(); 708 709 const ClusterBindings &Cluster = RI.getData(); 710 assert(!Cluster.isEmpty() && "Empty clusters should be removed"); 711 static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster); 712 713 // If the base's memspace should be entirely invalidated, add the cluster 714 // to the workspace up front. 715 if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base)) 716 AddToWorkList(WorkListElement(Base), &Cluster); 717 } 718 } 719 720 bool AddToWorkList(WorkListElement E, const ClusterBindings *C) { 721 if (C && !Visited.insert(C).second) 722 return false; 723 WL.push_back(E); 724 return true; 725 } 726 727 bool AddToWorkList(const MemRegion *R) { 728 return static_cast<DERIVED*>(this)->AddToWorkList(R); 729 } 730 731 void RunWorkList() { 732 while (!WL.empty()) { 733 WorkListElement E = WL.pop_back_val(); 734 const MemRegion *BaseR = E; 735 736 static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR)); 737 } 738 } 739 740 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {} 741 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {} 742 743 void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C, 744 bool Flag) { 745 static_cast<DERIVED*>(this)->VisitCluster(BaseR, C); 746 } 747 }; 748 } 749 750 //===----------------------------------------------------------------------===// 751 // Binding invalidation. 
752 //===----------------------------------------------------------------------===// 753 754 bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R, 755 ScanReachableSymbols &Callbacks) { 756 assert(R == R->getBaseRegion() && "Should only be called for base regions"); 757 RegionBindingsRef B = getRegionBindings(S); 758 const ClusterBindings *Cluster = B.lookup(R); 759 760 if (!Cluster) 761 return true; 762 763 for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end(); 764 RI != RE; ++RI) { 765 if (!Callbacks.scan(RI.getData())) 766 return false; 767 } 768 769 return true; 770 } 771 772 static inline bool isUnionField(const FieldRegion *FR) { 773 return FR->getDecl()->getParent()->isUnion(); 774 } 775 776 typedef SmallVector<const FieldDecl *, 8> FieldVector; 777 778 static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) { 779 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 780 781 const MemRegion *Base = K.getConcreteOffsetRegion(); 782 const MemRegion *R = K.getRegion(); 783 784 while (R != Base) { 785 if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) 786 if (!isUnionField(FR)) 787 Fields.push_back(FR->getDecl()); 788 789 R = cast<SubRegion>(R)->getSuperRegion(); 790 } 791 } 792 793 static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) { 794 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 795 796 if (Fields.empty()) 797 return true; 798 799 FieldVector FieldsInBindingKey; 800 getSymbolicOffsetFields(K, FieldsInBindingKey); 801 802 ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size(); 803 if (Delta >= 0) 804 return std::equal(FieldsInBindingKey.begin() + Delta, 805 FieldsInBindingKey.end(), 806 Fields.begin()); 807 else 808 return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(), 809 Fields.begin() - Delta); 810 } 811 812 /// Collects all bindings in \p Cluster that may refer to bindings within 813 /// \p Top. 814 /// 815 /// Each binding is a pair whose \c first is the key (a BindingKey) and whose 816 /// \c second is the value (an SVal). 817 /// 818 /// The \p IncludeAllDefaultBindings parameter specifies whether to include 819 /// default bindings that may extend beyond \p Top itself, e.g. if \p Top is 820 /// an aggregate within a larger aggregate with a default binding. 821 static void 822 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 823 SValBuilder &SVB, const ClusterBindings &Cluster, 824 const SubRegion *Top, BindingKey TopKey, 825 bool IncludeAllDefaultBindings) { 826 FieldVector FieldsInSymbolicSubregions; 827 if (TopKey.hasSymbolicOffset()) { 828 getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions); 829 Top = TopKey.getConcreteOffsetRegion(); 830 TopKey = BindingKey::Make(Top, BindingKey::Default); 831 } 832 833 // Find the length (in bits) of the region being invalidated. 834 uint64_t Length = UINT64_MAX; 835 SVal Extent = Top->getExtent(SVB); 836 if (Optional<nonloc::ConcreteInt> ExtentCI = 837 Extent.getAs<nonloc::ConcreteInt>()) { 838 const llvm::APSInt &ExtentInt = ExtentCI->getValue(); 839 assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned()); 840 // Extents are in bytes but region offsets are in bits. Be careful! 
841 Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth(); 842 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) { 843 if (FR->getDecl()->isBitField()) 844 Length = FR->getDecl()->getBitWidthValue(SVB.getContext()); 845 } 846 847 for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end(); 848 I != E; ++I) { 849 BindingKey NextKey = I.getKey(); 850 if (NextKey.getRegion() == TopKey.getRegion()) { 851 // FIXME: This doesn't catch the case where we're really invalidating a 852 // region with a symbolic offset. Example: 853 // R: points[i].y 854 // Next: points[0].x 855 856 if (NextKey.getOffset() > TopKey.getOffset() && 857 NextKey.getOffset() - TopKey.getOffset() < Length) { 858 // Case 1: The next binding is inside the region we're invalidating. 859 // Include it. 860 Bindings.push_back(*I); 861 862 } else if (NextKey.getOffset() == TopKey.getOffset()) { 863 // Case 2: The next binding is at the same offset as the region we're 864 // invalidating. In this case, we need to leave default bindings alone, 865 // since they may be providing a default value for a regions beyond what 866 // we're invalidating. 867 // FIXME: This is probably incorrect; consider invalidating an outer 868 // struct whose first field is bound to a LazyCompoundVal. 869 if (IncludeAllDefaultBindings || NextKey.isDirect()) 870 Bindings.push_back(*I); 871 } 872 873 } else if (NextKey.hasSymbolicOffset()) { 874 const MemRegion *Base = NextKey.getConcreteOffsetRegion(); 875 if (Top->isSubRegionOf(Base) && Top != Base) { 876 // Case 3: The next key is symbolic and we just changed something within 877 // its concrete region. We don't know if the binding is still valid, so 878 // we'll be conservative and include it. 879 if (IncludeAllDefaultBindings || NextKey.isDirect()) 880 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 881 Bindings.push_back(*I); 882 } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) { 883 // Case 4: The next key is symbolic, but we changed a known 884 // super-region. In this case the binding is certainly included. 885 if (BaseSR->isSubRegionOf(Top)) 886 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 887 Bindings.push_back(*I); 888 } 889 } 890 } 891 } 892 893 static void 894 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 895 SValBuilder &SVB, const ClusterBindings &Cluster, 896 const SubRegion *Top, bool IncludeAllDefaultBindings) { 897 collectSubRegionBindings(Bindings, SVB, Cluster, Top, 898 BindingKey::Make(Top, BindingKey::Default), 899 IncludeAllDefaultBindings); 900 } 901 902 RegionBindingsRef 903 RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B, 904 const SubRegion *Top) { 905 BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default); 906 const MemRegion *ClusterHead = TopKey.getBaseRegion(); 907 908 if (Top == ClusterHead) { 909 // We can remove an entire cluster's bindings all in one go. 910 return B.remove(Top); 911 } 912 913 const ClusterBindings *Cluster = B.lookup(ClusterHead); 914 if (!Cluster) { 915 // If we're invalidating a region with a symbolic offset, we need to make 916 // sure we don't treat the base region as uninitialized anymore. 
917 if (TopKey.hasSymbolicOffset()) { 918 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 919 return B.addBinding(Concrete, BindingKey::Default, UnknownVal()); 920 } 921 return B; 922 } 923 924 SmallVector<BindingPair, 32> Bindings; 925 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey, 926 /*IncludeAllDefaultBindings=*/false); 927 928 ClusterBindingsRef Result(*Cluster, CBFactory); 929 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 930 E = Bindings.end(); 931 I != E; ++I) 932 Result = Result.remove(I->first); 933 934 // If we're invalidating a region with a symbolic offset, we need to make sure 935 // we don't treat the base region as uninitialized anymore. 936 // FIXME: This isn't very precise; see the example in 937 // collectSubRegionBindings. 938 if (TopKey.hasSymbolicOffset()) { 939 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 940 Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default), 941 UnknownVal()); 942 } 943 944 if (Result.isEmpty()) 945 return B.remove(ClusterHead); 946 return B.add(ClusterHead, Result.asImmutableMap()); 947 } 948 949 namespace { 950 class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker> 951 { 952 const Expr *Ex; 953 unsigned Count; 954 const LocationContext *LCtx; 955 InvalidatedSymbols &IS; 956 RegionAndSymbolInvalidationTraits &ITraits; 957 StoreManager::InvalidatedRegions *Regions; 958 GlobalsFilterKind GlobalsFilter; 959 public: 960 InvalidateRegionsWorker(RegionStoreManager &rm, 961 ProgramStateManager &stateMgr, 962 RegionBindingsRef b, 963 const Expr *ex, unsigned count, 964 const LocationContext *lctx, 965 InvalidatedSymbols &is, 966 RegionAndSymbolInvalidationTraits &ITraitsIn, 967 StoreManager::InvalidatedRegions *r, 968 GlobalsFilterKind GFK) 969 : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b), 970 Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r), 971 GlobalsFilter(GFK) {} 972 973 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 974 void VisitBinding(SVal V); 975 976 using ClusterAnalysis::AddToWorkList; 977 978 bool AddToWorkList(const MemRegion *R); 979 980 /// Returns true if all clusters in the memory space for \p Base should be 981 /// be invalidated. 982 bool includeEntireMemorySpace(const MemRegion *Base); 983 984 /// Returns true if the memory space of the given region is one of the global 985 /// regions specially included at the start of invalidation. 986 bool isInitiallyIncludedGlobalRegion(const MemRegion *R); 987 }; 988 } 989 990 bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) { 991 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 992 R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 993 const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion(); 994 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 995 } 996 997 void InvalidateRegionsWorker::VisitBinding(SVal V) { 998 // A symbol? Mark it touched by the invalidation. 999 if (SymbolRef Sym = V.getAsSymbol()) 1000 IS.insert(Sym); 1001 1002 if (const MemRegion *R = V.getAsRegion()) { 1003 AddToWorkList(R); 1004 return; 1005 } 1006 1007 // Is it a LazyCompoundVal? All references get invalidated as well. 
1008 if (Optional<nonloc::LazyCompoundVal> LCS = 1009 V.getAs<nonloc::LazyCompoundVal>()) { 1010 1011 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 1012 1013 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 1014 E = Vals.end(); 1015 I != E; ++I) 1016 VisitBinding(*I); 1017 1018 return; 1019 } 1020 } 1021 1022 void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR, 1023 const ClusterBindings *C) { 1024 1025 bool PreserveRegionsContents = 1026 ITraits.hasTrait(baseR, 1027 RegionAndSymbolInvalidationTraits::TK_PreserveContents); 1028 1029 if (C) { 1030 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) 1031 VisitBinding(I.getData()); 1032 1033 // Invalidate regions contents. 1034 if (!PreserveRegionsContents) 1035 B = B.remove(baseR); 1036 } 1037 1038 if (const auto *TO = dyn_cast<TypedValueRegion>(baseR)) { 1039 if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) { 1040 1041 // Lambdas can affect all static local variables without explicitly 1042 // capturing those. 1043 // We invalidate all static locals referenced inside the lambda body. 1044 if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) { 1045 using namespace ast_matchers; 1046 1047 const char *DeclBind = "DeclBind"; 1048 StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr( 1049 to(varDecl(hasStaticStorageDuration()).bind(DeclBind))))); 1050 auto Matches = 1051 match(RefToStatic, *RD->getLambdaCallOperator()->getBody(), 1052 RD->getASTContext()); 1053 1054 for (BoundNodes &Match : Matches) { 1055 auto *VD = Match.getNodeAs<VarDecl>(DeclBind); 1056 const VarRegion *ToInvalidate = 1057 RM.getRegionManager().getVarRegion(VD, LCtx); 1058 AddToWorkList(ToInvalidate); 1059 } 1060 } 1061 } 1062 } 1063 1064 // BlockDataRegion? If so, invalidate captured variables that are passed 1065 // by reference. 1066 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) { 1067 for (BlockDataRegion::referenced_vars_iterator 1068 BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ; 1069 BI != BE; ++BI) { 1070 const VarRegion *VR = BI.getCapturedRegion(); 1071 const VarDecl *VD = VR->getDecl(); 1072 if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) { 1073 AddToWorkList(VR); 1074 } 1075 else if (Loc::isLocType(VR->getValueType())) { 1076 // Map the current bindings to a Store to retrieve the value 1077 // of the binding. If that binding itself is a region, we should 1078 // invalidate that region. This is because a block may capture 1079 // a pointer value, but the thing pointed by that pointer may 1080 // get invalidated. 1081 SVal V = RM.getBinding(B, loc::MemRegionVal(VR)); 1082 if (Optional<Loc> L = V.getAs<Loc>()) { 1083 if (const MemRegion *LR = L->getAsRegion()) 1084 AddToWorkList(LR); 1085 } 1086 } 1087 } 1088 return; 1089 } 1090 1091 // Symbolic region? 1092 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) 1093 IS.insert(SR->getSymbol()); 1094 1095 // Nothing else should be done in the case when we preserve regions context. 1096 if (PreserveRegionsContents) 1097 return; 1098 1099 // Otherwise, we have a normal data region. Record that we touched the region. 1100 if (Regions) 1101 Regions->push_back(baseR); 1102 1103 if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) { 1104 // Invalidate the region by setting its default value to 1105 // conjured symbol. The type of the symbol is irrelevant. 
1106 DefinedOrUnknownSVal V = 1107 svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count); 1108 B = B.addBinding(baseR, BindingKey::Default, V); 1109 return; 1110 } 1111 1112 if (!baseR->isBoundable()) 1113 return; 1114 1115 const TypedValueRegion *TR = cast<TypedValueRegion>(baseR); 1116 QualType T = TR->getValueType(); 1117 1118 if (isInitiallyIncludedGlobalRegion(baseR)) { 1119 // If the region is a global and we are invalidating all globals, 1120 // erasing the entry is good enough. This causes all globals to be lazily 1121 // symbolicated from the same base symbol. 1122 return; 1123 } 1124 1125 if (T->isRecordType()) { 1126 // Invalidate the region by setting its default value to 1127 // conjured symbol. The type of the symbol is irrelevant. 1128 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1129 Ctx.IntTy, Count); 1130 B = B.addBinding(baseR, BindingKey::Default, V); 1131 return; 1132 } 1133 1134 if (const ArrayType *AT = Ctx.getAsArrayType(T)) { 1135 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 1136 baseR, 1137 RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 1138 1139 if (doNotInvalidateSuperRegion) { 1140 // We are not doing blank invalidation of the whole array region so we 1141 // have to manually invalidate each elements. 1142 Optional<uint64_t> NumElements; 1143 1144 // Compute lower and upper offsets for region within array. 1145 if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT)) 1146 NumElements = CAT->getSize().getZExtValue(); 1147 if (!NumElements) // We are not dealing with a constant size array 1148 goto conjure_default; 1149 QualType ElementTy = AT->getElementType(); 1150 uint64_t ElemSize = Ctx.getTypeSize(ElementTy); 1151 const RegionOffset &RO = baseR->getAsOffset(); 1152 const MemRegion *SuperR = baseR->getBaseRegion(); 1153 if (RO.hasSymbolicOffset()) { 1154 // If base region has a symbolic offset, 1155 // we revert to invalidating the super region. 1156 if (SuperR) 1157 AddToWorkList(SuperR); 1158 goto conjure_default; 1159 } 1160 1161 uint64_t LowerOffset = RO.getOffset(); 1162 uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize; 1163 bool UpperOverflow = UpperOffset < LowerOffset; 1164 1165 // Invalidate regions which are within array boundaries, 1166 // or have a symbolic offset. 1167 if (!SuperR) 1168 goto conjure_default; 1169 1170 const ClusterBindings *C = B.lookup(SuperR); 1171 if (!C) 1172 goto conjure_default; 1173 1174 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; 1175 ++I) { 1176 const BindingKey &BK = I.getKey(); 1177 Optional<uint64_t> ROffset = 1178 BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset(); 1179 1180 // Check offset is not symbolic and within array's boundaries. 1181 // Handles arrays of 0 elements and of 0-sized elements as well. 1182 if (!ROffset || 1183 ((*ROffset >= LowerOffset && *ROffset < UpperOffset) || 1184 (UpperOverflow && 1185 (*ROffset >= LowerOffset || *ROffset < UpperOffset)) || 1186 (LowerOffset == UpperOffset && *ROffset == LowerOffset))) { 1187 B = B.removeBinding(I.getKey()); 1188 // Bound symbolic regions need to be invalidated for dead symbol 1189 // detection. 1190 SVal V = I.getData(); 1191 const MemRegion *R = V.getAsRegion(); 1192 if (R && isa<SymbolicRegion>(R)) 1193 VisitBinding(V); 1194 } 1195 } 1196 } 1197 conjure_default: 1198 // Set the default value of the array to conjured symbol. 
1199 DefinedOrUnknownSVal V = 1200 svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1201 AT->getElementType(), Count); 1202 B = B.addBinding(baseR, BindingKey::Default, V); 1203 return; 1204 } 1205 1206 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1207 T,Count); 1208 assert(SymbolManager::canSymbolicate(T) || V.isUnknown()); 1209 B = B.addBinding(baseR, BindingKey::Direct, V); 1210 } 1211 1212 bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion( 1213 const MemRegion *R) { 1214 switch (GlobalsFilter) { 1215 case GFK_None: 1216 return false; 1217 case GFK_SystemOnly: 1218 return isa<GlobalSystemSpaceRegion>(R->getMemorySpace()); 1219 case GFK_All: 1220 return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace()); 1221 } 1222 1223 llvm_unreachable("unknown globals filter"); 1224 } 1225 1226 bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) { 1227 if (isInitiallyIncludedGlobalRegion(Base)) 1228 return true; 1229 1230 const MemSpaceRegion *MemSpace = Base->getMemorySpace(); 1231 return ITraits.hasTrait(MemSpace, 1232 RegionAndSymbolInvalidationTraits::TK_EntireMemSpace); 1233 } 1234 1235 RegionBindingsRef 1236 RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K, 1237 const Expr *Ex, 1238 unsigned Count, 1239 const LocationContext *LCtx, 1240 RegionBindingsRef B, 1241 InvalidatedRegions *Invalidated) { 1242 // Bind the globals memory space to a new symbol that we will use to derive 1243 // the bindings for all globals. 1244 const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K); 1245 SVal V = svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx, 1246 /* type does not matter */ Ctx.IntTy, 1247 Count); 1248 1249 B = B.removeBinding(GS) 1250 .addBinding(BindingKey::Make(GS, BindingKey::Default), V); 1251 1252 // Even if there are no bindings in the global scope, we still need to 1253 // record that we touched it. 1254 if (Invalidated) 1255 Invalidated->push_back(GS); 1256 1257 return B; 1258 } 1259 1260 void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W, 1261 ArrayRef<SVal> Values, 1262 InvalidatedRegions *TopLevelRegions) { 1263 for (ArrayRef<SVal>::iterator I = Values.begin(), 1264 E = Values.end(); I != E; ++I) { 1265 SVal V = *I; 1266 if (Optional<nonloc::LazyCompoundVal> LCS = 1267 V.getAs<nonloc::LazyCompoundVal>()) { 1268 1269 const SValListTy &Vals = getInterestingValues(*LCS); 1270 1271 for (SValListTy::const_iterator I = Vals.begin(), 1272 E = Vals.end(); I != E; ++I) { 1273 // Note: the last argument is false here because these are 1274 // non-top-level regions. 
1275 if (const MemRegion *R = (*I).getAsRegion()) 1276 W.AddToWorkList(R); 1277 } 1278 continue; 1279 } 1280 1281 if (const MemRegion *R = V.getAsRegion()) { 1282 if (TopLevelRegions) 1283 TopLevelRegions->push_back(R); 1284 W.AddToWorkList(R); 1285 continue; 1286 } 1287 } 1288 } 1289 1290 StoreRef 1291 RegionStoreManager::invalidateRegions(Store store, 1292 ArrayRef<SVal> Values, 1293 const Expr *Ex, unsigned Count, 1294 const LocationContext *LCtx, 1295 const CallEvent *Call, 1296 InvalidatedSymbols &IS, 1297 RegionAndSymbolInvalidationTraits &ITraits, 1298 InvalidatedRegions *TopLevelRegions, 1299 InvalidatedRegions *Invalidated) { 1300 GlobalsFilterKind GlobalsFilter; 1301 if (Call) { 1302 if (Call->isInSystemHeader()) 1303 GlobalsFilter = GFK_SystemOnly; 1304 else 1305 GlobalsFilter = GFK_All; 1306 } else { 1307 GlobalsFilter = GFK_None; 1308 } 1309 1310 RegionBindingsRef B = getRegionBindings(store); 1311 InvalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits, 1312 Invalidated, GlobalsFilter); 1313 1314 // Scan the bindings and generate the clusters. 1315 W.GenerateClusters(); 1316 1317 // Add the regions to the worklist. 1318 populateWorkList(W, Values, TopLevelRegions); 1319 1320 W.RunWorkList(); 1321 1322 // Return the new bindings. 1323 B = W.getRegionBindings(); 1324 1325 // For calls, determine which global regions should be invalidated and 1326 // invalidate them. (Note that function-static and immutable globals are never 1327 // invalidated by this.) 1328 // TODO: This could possibly be more precise with modules. 1329 switch (GlobalsFilter) { 1330 case GFK_All: 1331 B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind, 1332 Ex, Count, LCtx, B, Invalidated); 1333 LLVM_FALLTHROUGH; 1334 case GFK_SystemOnly: 1335 B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind, 1336 Ex, Count, LCtx, B, Invalidated); 1337 LLVM_FALLTHROUGH; 1338 case GFK_None: 1339 break; 1340 } 1341 1342 return StoreRef(B.asStore(), *this); 1343 } 1344 1345 //===----------------------------------------------------------------------===// 1346 // Extents for regions. 1347 //===----------------------------------------------------------------------===// 1348 1349 DefinedOrUnknownSVal 1350 RegionStoreManager::getSizeInElements(ProgramStateRef state, 1351 const MemRegion *R, 1352 QualType EleTy) { 1353 SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder); 1354 const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size); 1355 if (!SizeInt) 1356 return UnknownVal(); 1357 1358 CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue()); 1359 1360 if (Ctx.getAsVariableArrayType(EleTy)) { 1361 // FIXME: We need to track extra state to properly record the size 1362 // of VLAs. Returning UnknownVal here, however, is a stop-gap so that 1363 // we don't have a divide-by-zero below. 1364 return UnknownVal(); 1365 } 1366 1367 CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy); 1368 1369 // If a variable is reinterpreted as a type that doesn't fit into a larger 1370 // type evenly, round it down. 1371 // This is a signed value, since it's used in arithmetic with signed indices. 1372 return svalBuilder.makeIntVal(RegionSize / EleSize, 1373 svalBuilder.getArrayIndexType()); 1374 } 1375 1376 //===----------------------------------------------------------------------===// 1377 // Location and region casting. 
1378 //===----------------------------------------------------------------------===// 1379 1380 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 1381 /// type. 'Array' represents the lvalue of the array being decayed 1382 /// to a pointer, and the returned SVal represents the decayed 1383 /// version of that lvalue (i.e., a pointer to the first element of 1384 /// the array). This is called by ExprEngine when evaluating casts 1385 /// from arrays to pointers. 1386 SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) { 1387 if (Array.getAs<loc::ConcreteInt>()) 1388 return Array; 1389 1390 if (!Array.getAs<loc::MemRegionVal>()) 1391 return UnknownVal(); 1392 1393 const SubRegion *R = 1394 cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion()); 1395 NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex(); 1396 return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx)); 1397 } 1398 1399 //===----------------------------------------------------------------------===// 1400 // Loading values from regions. 1401 //===----------------------------------------------------------------------===// 1402 1403 SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) { 1404 assert(!L.getAs<UnknownVal>() && "location unknown"); 1405 assert(!L.getAs<UndefinedVal>() && "location undefined"); 1406 1407 // For access to concrete addresses, return UnknownVal. Checks 1408 // for null dereferences (and similar errors) are done by checkers, not 1409 // the Store. 1410 // FIXME: We can consider lazily symbolicating such memory, but we really 1411 // should defer this when we can reason easily about symbolicating arrays 1412 // of bytes. 1413 if (L.getAs<loc::ConcreteInt>()) { 1414 return UnknownVal(); 1415 } 1416 if (!L.getAs<loc::MemRegionVal>()) { 1417 return UnknownVal(); 1418 } 1419 1420 const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion(); 1421 1422 if (isa<BlockDataRegion>(MR)) { 1423 return UnknownVal(); 1424 } 1425 1426 if (!isa<TypedValueRegion>(MR)) { 1427 if (T.isNull()) { 1428 if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR)) 1429 T = TR->getLocationType()->getPointeeType(); 1430 else if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR)) 1431 T = SR->getSymbol()->getType()->getPointeeType(); 1432 } 1433 assert(!T.isNull() && "Unable to auto-detect binding type!"); 1434 assert(!T->isVoidType() && "Attempting to dereference a void pointer!"); 1435 MR = GetElementZeroRegion(cast<SubRegion>(MR), T); 1436 } else { 1437 T = cast<TypedValueRegion>(MR)->getValueType(); 1438 } 1439 1440 // FIXME: Perhaps this method should just take a 'const MemRegion*' argument 1441 // instead of 'Loc', and have the other Loc cases handled at a higher level. 1442 const TypedValueRegion *R = cast<TypedValueRegion>(MR); 1443 QualType RTy = R->getValueType(); 1444 1445 // FIXME: we do not yet model the parts of a complex type, so treat the 1446 // whole thing as "unknown". 1447 if (RTy->isAnyComplexType()) 1448 return UnknownVal(); 1449 1450 // FIXME: We should eventually handle funny addressing. e.g.: 1451 // 1452 // int x = ...; 1453 // int *p = &x; 1454 // char *q = (char*) p; 1455 // char c = *q; // returns the first byte of 'x'. 1456 // 1457 // Such funny addressing will occur due to layering of regions. 1458 if (RTy->isStructureOrClassType()) 1459 return getBindingForStruct(B, R); 1460 1461 // FIXME: Handle unions. 
1462 if (RTy->isUnionType()) 1463 return createLazyBinding(B, R); 1464 1465 if (RTy->isArrayType()) { 1466 if (RTy->isConstantArrayType()) 1467 return getBindingForArray(B, R); 1468 else 1469 return UnknownVal(); 1470 } 1471 1472 // FIXME: handle Vector types. 1473 if (RTy->isVectorType()) 1474 return UnknownVal(); 1475 1476 if (const FieldRegion* FR = dyn_cast<FieldRegion>(R)) 1477 return CastRetrievedVal(getBindingForField(B, FR), FR, T); 1478 1479 if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) { 1480 // FIXME: Here we actually perform an implicit conversion from the loaded 1481 // value to the element type. Eventually we want to compose these values 1482 // more intelligently. For example, an 'element' can encompass multiple 1483 // bound regions (e.g., several bound bytes), or could be a subset of 1484 // a larger value. 1485 return CastRetrievedVal(getBindingForElement(B, ER), ER, T); 1486 } 1487 1488 if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) { 1489 // FIXME: Here we actually perform an implicit conversion from the loaded 1490 // value to the ivar type. What we should model is stores to ivars 1491 // that blow past the extent of the ivar. If the address of the ivar is 1492 // reinterpretted, it is possible we stored a different value that could 1493 // fit within the ivar. Either we need to cast these when storing them 1494 // or reinterpret them lazily (as we do here). 1495 return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T); 1496 } 1497 1498 if (const VarRegion *VR = dyn_cast<VarRegion>(R)) { 1499 // FIXME: Here we actually perform an implicit conversion from the loaded 1500 // value to the variable type. What we should model is stores to variables 1501 // that blow past the extent of the variable. If the address of the 1502 // variable is reinterpretted, it is possible we stored a different value 1503 // that could fit within the variable. Either we need to cast these when 1504 // storing them or reinterpret them lazily (as we do here). 1505 return CastRetrievedVal(getBindingForVar(B, VR), VR, T); 1506 } 1507 1508 const SVal *V = B.lookup(R, BindingKey::Direct); 1509 1510 // Check if the region has a binding. 1511 if (V) 1512 return *V; 1513 1514 // The location does not have a bound value. This means that it has 1515 // the value it had upon its creation and/or entry to the analyzed 1516 // function/method. These are either symbolic values or 'undefined'. 1517 if (R->hasStackNonParametersStorage()) { 1518 // All stack variables are considered to have undefined values 1519 // upon creation. All heap allocated blocks are considered to 1520 // have undefined values as well unless they are explicitly bound 1521 // to specific values. 1522 return UndefinedVal(); 1523 } 1524 1525 // All other values are symbolic. 1526 return svalBuilder.getRegionValueSymbolVal(R); 1527 } 1528 1529 static QualType getUnderlyingType(const SubRegion *R) { 1530 QualType RegionTy; 1531 if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R)) 1532 RegionTy = TVR->getValueType(); 1533 1534 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) 1535 RegionTy = SR->getSymbol()->getType(); 1536 1537 return RegionTy; 1538 } 1539 1540 /// Checks to see if store \p B has a lazy binding for region \p R. 1541 /// 1542 /// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected 1543 /// if there are additional bindings within \p R. 
1544 ///
1545 /// Note that unlike RegionStoreManager::findLazyBinding, this will not search
1546 /// for lazy bindings for super-regions of \p R.
1547 static Optional<nonloc::LazyCompoundVal>
1548 getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
1549 const SubRegion *R, bool AllowSubregionBindings) {
1550 Optional<SVal> V = B.getDefaultBinding(R);
1551 if (!V)
1552 return None;
1553
1554 Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>();
1555 if (!LCV)
1556 return None;
1557
1558 // If the LCV is for a subregion, the types might not match, and we shouldn't
1559 // reuse the binding.
1560 QualType RegionTy = getUnderlyingType(R);
1561 if (!RegionTy.isNull() &&
1562 !RegionTy->isVoidPointerType()) {
1563 QualType SourceRegionTy = LCV->getRegion()->getValueType();
1564 if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy))
1565 return None;
1566 }
1567
1568 if (!AllowSubregionBindings) {
1569 // If there are any other bindings within this region, we shouldn't reuse
1570 // the top-level binding.
1571 SmallVector<BindingPair, 16> Bindings;
1572 collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R,
1573 /*IncludeAllDefaultBindings=*/true);
1574 if (Bindings.size() > 1)
1575 return None;
1576 }
1577
1578 return *LCV;
1579 }
1580
1581
1582 std::pair<Store, const SubRegion *>
1583 RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
1584 const SubRegion *R,
1585 const SubRegion *originalRegion) {
1586 if (originalRegion != R) {
1587 if (Optional<nonloc::LazyCompoundVal> V =
1588 getExistingLazyBinding(svalBuilder, B, R, true))
1589 return std::make_pair(V->getStore(), V->getRegion());
1590 }
1591
1592 typedef std::pair<Store, const SubRegion *> StoreRegionPair;
1593 StoreRegionPair Result = StoreRegionPair();
1594
1595 if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
1596 Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()),
1597 originalRegion);
1598
1599 if (Result.second)
1600 Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second);
1601
1602 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
1603 Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()),
1604 originalRegion);
1605
1606 if (Result.second)
1607 Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second);
1608
1609 } else if (const CXXBaseObjectRegion *BaseReg =
1610 dyn_cast<CXXBaseObjectRegion>(R)) {
1611 // A C++ base object region is another kind of region that we should blast
1612 // through to look for a lazy compound value. It is like a field region.
1613 Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()),
1614 originalRegion);
1615
1616 if (Result.second)
1617 Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg,
1618 Result.second);
1619 }
1620
1621 return Result;
1622 }
1623
1624 SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
1625 const ElementRegion* R) {
1626 // We do not currently model bindings of the CompoundLiteralRegion.
1627 if (isa<CompoundLiteralRegion>(R->getBaseRegion()))
1628 return UnknownVal();
1629
1630 // Check if the region has a binding.
1631 if (const Optional<SVal> &V = B.getDirectBinding(R))
1632 return *V;
1633
1634 const MemRegion* superR = R->getSuperRegion();
1635
1636 // Check if the region is an element region of a string literal.
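// For illustration, given code like
//
//   const char *s = "hello";
//   char c = s[1];
//
// the branch below resolves the load to the concrete character 'e', and an
// index at or past the literal's length is folded to '\0'.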
1637 if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) { 1638 // FIXME: Handle loads from strings where the literal is treated as 1639 // an integer, e.g., *((unsigned int*)"hello") 1640 QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType(); 1641 if (!Ctx.hasSameUnqualifiedType(T, R->getElementType())) 1642 return UnknownVal(); 1643 1644 const StringLiteral *Str = StrR->getStringLiteral(); 1645 SVal Idx = R->getIndex(); 1646 if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) { 1647 int64_t i = CI->getValue().getSExtValue(); 1648 // Abort on string underrun. This can be possible by arbitrary 1649 // clients of getBindingForElement(). 1650 if (i < 0) 1651 return UndefinedVal(); 1652 int64_t length = Str->getLength(); 1653 // Technically, only i == length is guaranteed to be null. 1654 // However, such overflows should be caught before reaching this point; 1655 // the only time such an access would be made is if a string literal was 1656 // used to initialize a larger array. 1657 char c = (i >= length) ? '\0' : Str->getCodeUnit(i); 1658 return svalBuilder.makeIntVal(c, T); 1659 } 1660 } else if (const VarRegion *VR = dyn_cast<VarRegion>(superR)) { 1661 // Check if the containing array is const and has an initialized value. 1662 const VarDecl *VD = VR->getDecl(); 1663 // Either the array or the array element has to be const. 1664 if (VD->getType().isConstQualified() || R->getElementType().isConstQualified()) { 1665 if (const Expr *Init = VD->getInit()) { 1666 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1667 // The array index has to be known. 1668 if (auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) { 1669 int64_t i = CI->getValue().getSExtValue(); 1670 // If it is known that the index is out of bounds, we can return 1671 // an undefined value. 1672 if (i < 0) 1673 return UndefinedVal(); 1674 1675 if (auto CAT = Ctx.getAsConstantArrayType(VD->getType())) 1676 if (CAT->getSize().sle(i)) 1677 return UndefinedVal(); 1678 1679 // If there is a list, but no init, it must be zero. 1680 if (i >= InitList->getNumInits()) 1681 return svalBuilder.makeZeroVal(R->getElementType()); 1682 1683 if (const Expr *ElemInit = InitList->getInit(i)) 1684 if (Optional<SVal> V = svalBuilder.getConstantVal(ElemInit)) 1685 return *V; 1686 } 1687 } 1688 } 1689 } 1690 } 1691 1692 // Check for loads from a code text region. For such loads, just give up. 1693 if (isa<CodeTextRegion>(superR)) 1694 return UnknownVal(); 1695 1696 // Handle the case where we are indexing into a larger scalar object. 1697 // For example, this handles: 1698 // int x = ... 1699 // char *y = &x; 1700 // return *y; 1701 // FIXME: This is a hack, and doesn't do anything really intelligent yet. 1702 const RegionRawOffset &O = R->getAsArrayOffset(); 1703 1704 // If we cannot reason about the offset, return an unknown value. 1705 if (!O.getRegion()) 1706 return UnknownVal(); 1707 1708 if (const TypedValueRegion *baseR = 1709 dyn_cast_or_null<TypedValueRegion>(O.getRegion())) { 1710 QualType baseT = baseR->getValueType(); 1711 if (baseT->isScalarType()) { 1712 QualType elemT = R->getElementType(); 1713 if (elemT->isScalarType()) { 1714 if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) { 1715 if (const Optional<SVal> &V = B.getDirectBinding(superR)) { 1716 if (SymbolRef parentSym = V->getAsSymbol()) 1717 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1718 1719 if (V->isUnknownOrUndef()) 1720 return *V; 1721 // Other cases: give up. 
We are indexing into a larger object 1722 // that has some value, but we don't know how to handle that yet. 1723 return UnknownVal(); 1724 } 1725 } 1726 } 1727 } 1728 } 1729 return getBindingForFieldOrElementCommon(B, R, R->getElementType()); 1730 } 1731 1732 SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B, 1733 const FieldRegion* R) { 1734 1735 // Check if the region has a binding. 1736 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1737 return *V; 1738 1739 // Is the field declared constant and has an in-class initializer? 1740 const FieldDecl *FD = R->getDecl(); 1741 QualType Ty = FD->getType(); 1742 if (Ty.isConstQualified()) 1743 if (const Expr *Init = FD->getInClassInitializer()) 1744 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1745 return *V; 1746 1747 // If the containing record was initialized, try to get its constant value. 1748 const MemRegion* superR = R->getSuperRegion(); 1749 if (const auto *VR = dyn_cast<VarRegion>(superR)) { 1750 const VarDecl *VD = VR->getDecl(); 1751 QualType RecordVarTy = VD->getType(); 1752 unsigned Index = FD->getFieldIndex(); 1753 // Either the record variable or the field has to be const qualified. 1754 if (RecordVarTy.isConstQualified() || Ty.isConstQualified()) 1755 if (const Expr *Init = VD->getInit()) 1756 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1757 if (Index < InitList->getNumInits()) { 1758 if (const Expr *FieldInit = InitList->getInit(Index)) 1759 if (Optional<SVal> V = svalBuilder.getConstantVal(FieldInit)) 1760 return *V; 1761 } else { 1762 return svalBuilder.makeZeroVal(Ty); 1763 } 1764 } 1765 } 1766 1767 return getBindingForFieldOrElementCommon(B, R, Ty); 1768 } 1769 1770 Optional<SVal> 1771 RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 1772 const MemRegion *superR, 1773 const TypedValueRegion *R, 1774 QualType Ty) { 1775 1776 if (const Optional<SVal> &D = B.getDefaultBinding(superR)) { 1777 const SVal &val = D.getValue(); 1778 if (SymbolRef parentSym = val.getAsSymbol()) 1779 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1780 1781 if (val.isZeroConstant()) 1782 return svalBuilder.makeZeroVal(Ty); 1783 1784 if (val.isUnknownOrUndef()) 1785 return val; 1786 1787 // Lazy bindings are usually handled through getExistingLazyBinding(). 1788 // We should unify these two code paths at some point. 1789 if (val.getAs<nonloc::LazyCompoundVal>() || 1790 val.getAs<nonloc::CompoundVal>()) 1791 return val; 1792 1793 llvm_unreachable("Unknown default value"); 1794 } 1795 1796 return None; 1797 } 1798 1799 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 1800 RegionBindingsRef LazyBinding) { 1801 SVal Result; 1802 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 1803 Result = getBindingForElement(LazyBinding, ER); 1804 else 1805 Result = getBindingForField(LazyBinding, 1806 cast<FieldRegion>(LazyBindingRegion)); 1807 1808 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1809 // default value for /part/ of an aggregate from a default value for the 1810 // /entire/ aggregate. The most common case of this is when struct Outer 1811 // has as its first member a struct Inner, which is copied in from a stack 1812 // variable. In this case, even if the Outer's default value is symbolic, 0, 1813 // or unknown, it gets overridden by the Inner's default value of undefined. 
1814 // 1815 // This is a general problem -- if the Inner is zero-initialized, the Outer 1816 // will now look zero-initialized. The proper way to solve this is with a 1817 // new version of RegionStore that tracks the extent of a binding as well 1818 // as the offset. 1819 // 1820 // This hack only takes care of the undefined case because that can very 1821 // quickly result in a warning. 1822 if (Result.isUndef()) 1823 Result = UnknownVal(); 1824 1825 return Result; 1826 } 1827 1828 SVal 1829 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 1830 const TypedValueRegion *R, 1831 QualType Ty) { 1832 1833 // At this point we have already checked in either getBindingForElement or 1834 // getBindingForField if 'R' has a direct binding. 1835 1836 // Lazy binding? 1837 Store lazyBindingStore = nullptr; 1838 const SubRegion *lazyBindingRegion = nullptr; 1839 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 1840 if (lazyBindingRegion) 1841 return getLazyBinding(lazyBindingRegion, 1842 getRegionBindings(lazyBindingStore)); 1843 1844 // Record whether or not we see a symbolic index. That can completely 1845 // be out of scope of our lookup. 1846 bool hasSymbolicIndex = false; 1847 1848 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1849 // default value for /part/ of an aggregate from a default value for the 1850 // /entire/ aggregate. The most common case of this is when struct Outer 1851 // has as its first member a struct Inner, which is copied in from a stack 1852 // variable. In this case, even if the Outer's default value is symbolic, 0, 1853 // or unknown, it gets overridden by the Inner's default value of undefined. 1854 // 1855 // This is a general problem -- if the Inner is zero-initialized, the Outer 1856 // will now look zero-initialized. The proper way to solve this is with a 1857 // new version of RegionStore that tracks the extent of a binding as well 1858 // as the offset. 1859 // 1860 // This hack only takes care of the undefined case because that can very 1861 // quickly result in a warning. 1862 bool hasPartialLazyBinding = false; 1863 1864 const SubRegion *SR = R; 1865 while (SR) { 1866 const MemRegion *Base = SR->getSuperRegion(); 1867 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 1868 if (D->getAs<nonloc::LazyCompoundVal>()) { 1869 hasPartialLazyBinding = true; 1870 break; 1871 } 1872 1873 return *D; 1874 } 1875 1876 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 1877 NonLoc index = ER->getIndex(); 1878 if (!index.isConstant()) 1879 hasSymbolicIndex = true; 1880 } 1881 1882 // If our super region is a field or element itself, walk up the region 1883 // hierarchy to see if there is a default value installed in an ancestor. 1884 SR = dyn_cast<SubRegion>(Base); 1885 } 1886 1887 if (R->hasStackNonParametersStorage()) { 1888 if (isa<ElementRegion>(R)) { 1889 // Currently we don't reason specially about Clang-style vectors. Check 1890 // if superR is a vector and if so return Unknown. 1891 if (const TypedValueRegion *typedSuperR = 1892 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 1893 if (typedSuperR->getValueType()->isVectorType()) 1894 return UnknownVal(); 1895 } 1896 } 1897 1898 // FIXME: We also need to take ElementRegions with symbolic indexes into 1899 // account. This case handles both directly accessing an ElementRegion 1900 // with a symbolic offset, but also fields within an element with 1901 // a symbolic offset. 
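// For illustration, a read such as
//
//   int w = arr[i].f;   // 'i' not known to be a constant
//
// reaches this point with 'hasSymbolicIndex' set, because the enclosing
// ElementRegion has a non-constant index, and is conservatively treated as
// Unknown below.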
1902 if (hasSymbolicIndex) 1903 return UnknownVal(); 1904 1905 if (!hasPartialLazyBinding) 1906 return UndefinedVal(); 1907 } 1908 1909 // All other values are symbolic. 1910 return svalBuilder.getRegionValueSymbolVal(R); 1911 } 1912 1913 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 1914 const ObjCIvarRegion* R) { 1915 // Check if the region has a binding. 1916 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1917 return *V; 1918 1919 const MemRegion *superR = R->getSuperRegion(); 1920 1921 // Check if the super region has a default binding. 1922 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 1923 if (SymbolRef parentSym = V->getAsSymbol()) 1924 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1925 1926 // Other cases: give up. 1927 return UnknownVal(); 1928 } 1929 1930 return getBindingForLazySymbol(R); 1931 } 1932 1933 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 1934 const VarRegion *R) { 1935 1936 // Check if the region has a binding. 1937 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1938 return *V; 1939 1940 // Lazily derive a value for the VarRegion. 1941 const VarDecl *VD = R->getDecl(); 1942 const MemSpaceRegion *MS = R->getMemorySpace(); 1943 1944 // Arguments are always symbolic. 1945 if (isa<StackArgumentsSpaceRegion>(MS)) 1946 return svalBuilder.getRegionValueSymbolVal(R); 1947 1948 // Is 'VD' declared constant? If so, retrieve the constant value. 1949 if (VD->getType().isConstQualified()) { 1950 if (const Expr *Init = VD->getInit()) { 1951 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1952 return *V; 1953 1954 // If the variable is const qualified and has an initializer but 1955 // we couldn't evaluate initializer to a value, treat the value as 1956 // unknown. 1957 return UnknownVal(); 1958 } 1959 } 1960 1961 // This must come after the check for constants because closure-captured 1962 // constant variables may appear in UnknownSpaceRegion. 1963 if (isa<UnknownSpaceRegion>(MS)) 1964 return svalBuilder.getRegionValueSymbolVal(R); 1965 1966 if (isa<GlobalsSpaceRegion>(MS)) { 1967 QualType T = VD->getType(); 1968 1969 // Function-scoped static variables are default-initialized to 0; if they 1970 // have an initializer, it would have been processed by now. 1971 // FIXME: This is only true when we're starting analysis from main(). 1972 // We're losing a lot of coverage here. 1973 if (isa<StaticGlobalSpaceRegion>(MS)) 1974 return svalBuilder.makeZeroVal(T); 1975 1976 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 1977 assert(!V->getAs<nonloc::LazyCompoundVal>()); 1978 return V.getValue(); 1979 } 1980 1981 return svalBuilder.getRegionValueSymbolVal(R); 1982 } 1983 1984 return UndefinedVal(); 1985 } 1986 1987 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 1988 // All other values are symbolic. 1989 return svalBuilder.getRegionValueSymbolVal(R); 1990 } 1991 1992 const RegionStoreManager::SValListTy & 1993 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 1994 // First, check the cache. 1995 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 1996 if (I != LazyBindingsMap.end()) 1997 return I->second; 1998 1999 // If we don't have a list of values cached, start constructing it. 
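// For illustration, given a lazy copy of
//
//   struct S { int n; int *p; } s = { 1, q };   // 'q' is some pointer
//
// the loop below keeps the (possibly symbolic) value bound to 's.p', skips
// the constant bound to 's.n', and recurses into any nested lazy compound
// values.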
2000 SValListTy List; 2001 2002 const SubRegion *LazyR = LCV.getRegion(); 2003 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 2004 2005 // If this region had /no/ bindings at the time, there are no interesting 2006 // values to return. 2007 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 2008 if (!Cluster) 2009 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2010 2011 SmallVector<BindingPair, 32> Bindings; 2012 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 2013 /*IncludeAllDefaultBindings=*/true); 2014 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 2015 E = Bindings.end(); 2016 I != E; ++I) { 2017 SVal V = I->second; 2018 if (V.isUnknownOrUndef() || V.isConstant()) 2019 continue; 2020 2021 if (Optional<nonloc::LazyCompoundVal> InnerLCV = 2022 V.getAs<nonloc::LazyCompoundVal>()) { 2023 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 2024 List.insert(List.end(), InnerList.begin(), InnerList.end()); 2025 continue; 2026 } 2027 2028 List.push_back(V); 2029 } 2030 2031 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2032 } 2033 2034 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 2035 const TypedValueRegion *R) { 2036 if (Optional<nonloc::LazyCompoundVal> V = 2037 getExistingLazyBinding(svalBuilder, B, R, false)) 2038 return *V; 2039 2040 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 2041 } 2042 2043 static bool isRecordEmpty(const RecordDecl *RD) { 2044 if (!RD->field_empty()) 2045 return false; 2046 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 2047 return CRD->getNumBases() == 0; 2048 return true; 2049 } 2050 2051 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 2052 const TypedValueRegion *R) { 2053 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 2054 if (!RD->getDefinition() || isRecordEmpty(RD)) 2055 return UnknownVal(); 2056 2057 return createLazyBinding(B, R); 2058 } 2059 2060 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 2061 const TypedValueRegion *R) { 2062 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 2063 "Only constant array types can have compound bindings."); 2064 2065 return createLazyBinding(B, R); 2066 } 2067 2068 bool RegionStoreManager::includedInBindings(Store store, 2069 const MemRegion *region) const { 2070 RegionBindingsRef B = getRegionBindings(store); 2071 region = region->getBaseRegion(); 2072 2073 // Quick path: if the base is the head of a cluster, the region is live. 2074 if (B.lookup(region)) 2075 return true; 2076 2077 // Slow path: if the region is the VALUE of any binding, it is live. 2078 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 2079 const ClusterBindings &Cluster = RI.getData(); 2080 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2081 CI != CE; ++CI) { 2082 const SVal &D = CI.getData(); 2083 if (const MemRegion *R = D.getAsRegion()) 2084 if (R->getBaseRegion() == region) 2085 return true; 2086 } 2087 } 2088 2089 return false; 2090 } 2091 2092 //===----------------------------------------------------------------------===// 2093 // Binding values to regions. 
2094 //===----------------------------------------------------------------------===//
2095
2096 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) {
2097 if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>())
2098 if (const MemRegion* R = LV->getRegion())
2099 return StoreRef(getRegionBindings(ST).removeBinding(R)
2100 .asImmutableMap()
2101 .getRootWithoutRetain(),
2102 *this);
2103
2104 return StoreRef(ST, *this);
2105 }
2106
2107 RegionBindingsRef
2108 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) {
2109 if (L.getAs<loc::ConcreteInt>())
2110 return B;
2111
2112 // If we get here, the location should be a region.
2113 const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion();
2114
2115 // Check if the region is a struct region.
2116 if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) {
2117 QualType Ty = TR->getValueType();
2118 if (Ty->isArrayType())
2119 return bindArray(B, TR, V);
2120 if (Ty->isStructureOrClassType())
2121 return bindStruct(B, TR, V);
2122 if (Ty->isVectorType())
2123 return bindVector(B, TR, V);
2124 if (Ty->isUnionType())
2125 return bindAggregate(B, TR, V);
2126 }
2127
2128 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) {
2129 // Binding directly to a symbolic region should be treated as binding
2130 // to element 0.
2131 QualType T = SR->getSymbol()->getType();
2132 if (T->isAnyPointerType() || T->isReferenceType())
2133 T = T->getPointeeType();
2134
2135 R = GetElementZeroRegion(SR, T);
2136 }
2137
2138 assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) &&
2139 "'this' pointer is not an l-value and is not assignable");
2140
2141 // Clear out bindings that may overlap with this binding.
2142 RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R));
2143 return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V);
2144 }
2145
2146 RegionBindingsRef
2147 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B,
2148 const MemRegion *R,
2149 QualType T) {
2150 SVal V;
2151
2152 if (Loc::isLocType(T))
2153 V = svalBuilder.makeNull();
2154 else if (T->isIntegralOrEnumerationType())
2155 V = svalBuilder.makeZeroVal(T);
2156 else if (T->isStructureOrClassType() || T->isArrayType()) {
2157 // Set the default value to a zero constant when it is a structure
2158 // or array. The type doesn't really matter.
2159 V = svalBuilder.makeZeroVal(Ctx.IntTy);
2160 }
2161 else {
2162 // We can't represent values of this type, but we still need to set a value
2163 // to record that the region has been initialized.
2164 // If this assertion ever fires, a new case should be added above -- we
2165 // should know how to default-initialize any value we can symbolicate.
2166 assert(!SymbolManager::canSymbolicate(T) && "This type is representable");
2167 V = UnknownVal();
2168 }
2169
2170 return B.addBinding(R, BindingKey::Default, V);
2171 }
2172
2173 RegionBindingsRef
2174 RegionStoreManager::bindArray(RegionBindingsConstRef B,
2175 const TypedValueRegion* R,
2176 SVal Init) {
2177
2178 const ArrayType *AT = cast<ArrayType>(Ctx.getCanonicalType(R->getValueType()));
2179 QualType ElementTy = AT->getElementType();
2180 Optional<uint64_t> Size;
2181
2182 if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT))
2183 Size = CAT->getSize().getZExtValue();
2184
2185 // Check if the init expr is a literal. If so, bind the rvalue instead.
2186 // FIXME: It's not the responsibility of the Store to transform this lvalue
2187 // to an rvalue. ExprEngine or maybe even CFG should do this before binding.
2188 if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
2189 SVal V = getBinding(B.asStore(), *MRV, R->getValueType());
2190 return bindAggregate(B, R, V);
2191 }
2192
2193 // Handle lazy compound values.
2194 if (Init.getAs<nonloc::LazyCompoundVal>())
2195 return bindAggregate(B, R, Init);
2196
2197 if (Init.isUnknown())
2198 return bindAggregate(B, R, UnknownVal());
2199
2200 // Remaining case: explicit compound values.
2201 const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2202 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2203 uint64_t i = 0;
2204
2205 RegionBindingsRef NewB(B);
2206
2207 for (; Size.hasValue() ? i < Size.getValue() : true; ++i, ++VI) {
2208 // The init list might be shorter than the array length.
2209 if (VI == VE)
2210 break;
2211
2212 const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
2213 const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);
2214
2215 if (ElementTy->isStructureOrClassType())
2216 NewB = bindStruct(NewB, ER, *VI);
2217 else if (ElementTy->isArrayType())
2218 NewB = bindArray(NewB, ER, *VI);
2219 else
2220 NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2221 }
2222
2223 // If the init list is shorter than the array length (or the array has
2224 // variable length), set the array default value. Values that are already set
2225 // are not overwritten.
2226 if (!Size.hasValue() || i < Size.getValue())
2227 NewB = setImplicitDefaultValue(NewB, R, ElementTy);
2228
2229 return NewB;
2230 }
2231
2232 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
2233 const TypedValueRegion* R,
2234 SVal V) {
2235 QualType T = R->getValueType();
2236 assert(T->isVectorType());
2237 const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.
2238
2239 // Handle lazy compound values and symbolic values.
2240 if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
2241 return bindAggregate(B, R, V);
2242
2243 // We may get a non-CompoundVal accidentally due to imprecise cast logic or
2244 // because we are binding a symbolic struct value. Kill the field values, and
2245 // if the value is symbolic go and bind it as a "default" binding.
2246 if (!V.getAs<nonloc::CompoundVal>()) { 2247 return bindAggregate(B, R, UnknownVal()); 2248 } 2249 2250 QualType ElemType = VT->getElementType(); 2251 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>(); 2252 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2253 unsigned index = 0, numElements = VT->getNumElements(); 2254 RegionBindingsRef NewB(B); 2255 2256 for ( ; index != numElements ; ++index) { 2257 if (VI == VE) 2258 break; 2259 2260 NonLoc Idx = svalBuilder.makeArrayIndex(index); 2261 const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx); 2262 2263 if (ElemType->isArrayType()) 2264 NewB = bindArray(NewB, ER, *VI); 2265 else if (ElemType->isStructureOrClassType()) 2266 NewB = bindStruct(NewB, ER, *VI); 2267 else 2268 NewB = bind(NewB, loc::MemRegionVal(ER), *VI); 2269 } 2270 return NewB; 2271 } 2272 2273 Optional<RegionBindingsRef> 2274 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B, 2275 const TypedValueRegion *R, 2276 const RecordDecl *RD, 2277 nonloc::LazyCompoundVal LCV) { 2278 FieldVector Fields; 2279 2280 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD)) 2281 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0) 2282 return None; 2283 2284 for (const auto *FD : RD->fields()) { 2285 if (FD->isUnnamedBitfield()) 2286 continue; 2287 2288 // If there are too many fields, or if any of the fields are aggregates, 2289 // just use the LCV as a default binding. 2290 if (Fields.size() == SmallStructLimit) 2291 return None; 2292 2293 QualType Ty = FD->getType(); 2294 if (!(Ty->isScalarType() || Ty->isReferenceType())) 2295 return None; 2296 2297 Fields.push_back(FD); 2298 } 2299 2300 RegionBindingsRef NewB = B; 2301 2302 for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){ 2303 const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion()); 2304 SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR); 2305 2306 const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R); 2307 NewB = bind(NewB, loc::MemRegionVal(DestFR), V); 2308 } 2309 2310 return NewB; 2311 } 2312 2313 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B, 2314 const TypedValueRegion* R, 2315 SVal V) { 2316 if (!Features.supportsFields()) 2317 return B; 2318 2319 QualType T = R->getValueType(); 2320 assert(T->isStructureOrClassType()); 2321 2322 const RecordType* RT = T->getAs<RecordType>(); 2323 const RecordDecl *RD = RT->getDecl(); 2324 2325 if (!RD->isCompleteDefinition()) 2326 return B; 2327 2328 // Handle lazy compound values and symbolic values. 2329 if (Optional<nonloc::LazyCompoundVal> LCV = 2330 V.getAs<nonloc::LazyCompoundVal>()) { 2331 if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV)) 2332 return *NewB; 2333 return bindAggregate(B, R, V); 2334 } 2335 if (V.getAs<nonloc::SymbolVal>()) 2336 return bindAggregate(B, R, V); 2337 2338 // We may get non-CompoundVal accidentally due to imprecise cast logic or 2339 // that we are binding symbolic struct value. Kill the field values, and if 2340 // the value is symbolic go and bind it as a "default" binding. 
2341 if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
2342 return bindAggregate(B, R, UnknownVal());
2343
2344 const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2345 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2346
2347 RecordDecl::field_iterator FI, FE;
2348 RegionBindingsRef NewB(B);
2349
2350 for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2351
2352 if (VI == VE)
2353 break;
2354
2355 // Skip any unnamed bitfields to stay in sync with the initializers.
2356 if (FI->isUnnamedBitfield())
2357 continue;
2358
2359 QualType FTy = FI->getType();
2360 const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);
2361
2362 if (FTy->isArrayType())
2363 NewB = bindArray(NewB, FR, *VI);
2364 else if (FTy->isStructureOrClassType())
2365 NewB = bindStruct(NewB, FR, *VI);
2366 else
2367 NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
2368 ++VI;
2369 }
2370
2371 // There may be fewer values in the initializer list than the struct has fields.
2372 if (FI != FE) {
2373 NewB = NewB.addBinding(R, BindingKey::Default,
2374 svalBuilder.makeIntVal(0, false));
2375 }
2376
2377 return NewB;
2378 }
2379
2380 RegionBindingsRef
2381 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2382 const TypedRegion *R,
2383 SVal Val) {
2384 // Remove the old bindings, using 'R' as the root of all regions
2385 // we will invalidate. Then add the new binding.
2386 return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2387 }
2388
2389 //===----------------------------------------------------------------------===//
2390 // State pruning.
2391 //===----------------------------------------------------------------------===//
2392
2393 namespace {
2394 class RemoveDeadBindingsWorker
2395 : public ClusterAnalysis<RemoveDeadBindingsWorker> {
2396 using ChildrenListTy = SmallVector<const SymbolDerived *, 4>;
2397 using MapParentsToDerivedTy = llvm::DenseMap<SymbolRef, ChildrenListTy>;
2398
2399 MapParentsToDerivedTy ParentsToDerived;
2400 SymbolReaper &SymReaper;
2401 const StackFrameContext *CurrentLCtx;
2402
2403 public:
2404 RemoveDeadBindingsWorker(RegionStoreManager &rm,
2405 ProgramStateManager &stateMgr,
2406 RegionBindingsRef b, SymbolReaper &symReaper,
2407 const StackFrameContext *LCtx)
2408 : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b),
2409 SymReaper(symReaper), CurrentLCtx(LCtx) {}
2410
2411 // Called by ClusterAnalysis.
2412 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2413 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2414 using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster; 2415 2416 using ClusterAnalysis::AddToWorkList; 2417 2418 bool AddToWorkList(const MemRegion *R); 2419 2420 void VisitBinding(SVal V); 2421 2422 private: 2423 void populateWorklistFromSymbol(SymbolRef s); 2424 }; 2425 } 2426 2427 bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2428 const MemRegion *BaseR = R->getBaseRegion(); 2429 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2430 } 2431 2432 void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2433 const ClusterBindings &C) { 2434 2435 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2436 if (SymReaper.isLive(VR)) 2437 AddToWorkList(baseR, &C); 2438 2439 return; 2440 } 2441 2442 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2443 if (SymReaper.isLive(SR->getSymbol())) { 2444 AddToWorkList(SR, &C); 2445 } else if (const auto *SD = dyn_cast<SymbolDerived>(SR->getSymbol())) { 2446 ParentsToDerived[SD->getParentSymbol()].push_back(SD); 2447 } 2448 2449 return; 2450 } 2451 2452 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2453 AddToWorkList(baseR, &C); 2454 return; 2455 } 2456 2457 // CXXThisRegion in the current or parent location context is live. 2458 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2459 const auto *StackReg = 2460 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2461 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2462 if (CurrentLCtx && 2463 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2464 AddToWorkList(TR, &C); 2465 } 2466 } 2467 2468 void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2469 const ClusterBindings *C) { 2470 if (!C) 2471 return; 2472 2473 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2474 // This means we should continue to track that symbol. 2475 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2476 SymReaper.markLive(SymR->getSymbol()); 2477 2478 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2479 // Element index of a binding key is live. 2480 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2481 2482 VisitBinding(I.getData()); 2483 } 2484 } 2485 2486 void RemoveDeadBindingsWorker::VisitBinding(SVal V) { 2487 // Is it a LazyCompoundVal? All referenced regions are live as well. 2488 if (Optional<nonloc::LazyCompoundVal> LCS = 2489 V.getAs<nonloc::LazyCompoundVal>()) { 2490 2491 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 2492 2493 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 2494 E = Vals.end(); 2495 I != E; ++I) 2496 VisitBinding(*I); 2497 2498 return; 2499 } 2500 2501 // If V is a region, then add it to the worklist. 2502 if (const MemRegion *R = V.getAsRegion()) { 2503 AddToWorkList(R); 2504 2505 if (const auto *TVR = dyn_cast<TypedValueRegion>(R)) { 2506 DefinedOrUnknownSVal RVS = 2507 RM.getSValBuilder().getRegionValueSymbolVal(TVR); 2508 if (const MemRegion *SR = RVS.getAsRegion()) { 2509 AddToWorkList(SR); 2510 } 2511 } 2512 2513 SymReaper.markLive(R); 2514 2515 // All regions captured by a block are also live. 
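// For illustration, given
//
//   int x = 0;
//   int (^b)(void) = ^{ return x; };
//
// the BlockDataRegion bound to 'b' puts the region captured for 'x' back on
// the worklist, so bindings reachable only through the block stay live.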
2516 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2517 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2518 E = BR->referenced_vars_end(); 2519 for ( ; I != E; ++I) 2520 AddToWorkList(I.getCapturedRegion()); 2521 } 2522 } 2523 2524 2525 // Update the set of live symbols. 2526 for (auto SI = V.symbol_begin(), SE = V.symbol_end(); SI != SE; ++SI) { 2527 populateWorklistFromSymbol(*SI); 2528 2529 for (const auto *SD : ParentsToDerived[*SI]) 2530 populateWorklistFromSymbol(SD); 2531 2532 SymReaper.markLive(*SI); 2533 } 2534 } 2535 2536 void RemoveDeadBindingsWorker::populateWorklistFromSymbol(SymbolRef S) { 2537 if (const auto *SD = dyn_cast<SymbolData>(S)) { 2538 if (Loc::isLocType(SD->getType()) && !SymReaper.isLive(SD)) { 2539 const SymbolicRegion *SR = RM.getRegionManager().getSymbolicRegion(SD); 2540 2541 if (B.contains(SR)) 2542 AddToWorkList(SR); 2543 2544 const SymbolicRegion *SHR = 2545 RM.getRegionManager().getSymbolicHeapRegion(SD); 2546 if (B.contains(SHR)) 2547 AddToWorkList(SHR); 2548 } 2549 } 2550 } 2551 2552 StoreRef RegionStoreManager::removeDeadBindings(Store store, 2553 const StackFrameContext *LCtx, 2554 SymbolReaper& SymReaper) { 2555 RegionBindingsRef B = getRegionBindings(store); 2556 RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx); 2557 W.GenerateClusters(); 2558 2559 // Enqueue the region roots onto the worklist. 2560 for (SymbolReaper::region_iterator I = SymReaper.region_begin(), 2561 E = SymReaper.region_end(); I != E; ++I) { 2562 W.AddToWorkList(*I); 2563 } 2564 2565 W.RunWorkList(); 2566 2567 // We have now scanned the store, marking reachable regions and symbols 2568 // as live. We now remove all the regions that are dead from the store 2569 // as well as update DSymbols with the set symbols that are now dead. 2570 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 2571 const MemRegion *Base = I.getKey(); 2572 2573 // If the cluster has been visited, we know the region has been marked. 2574 if (W.isVisited(Base)) 2575 continue; 2576 2577 // Remove the dead entry. 2578 B = B.remove(Base); 2579 2580 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Base)) 2581 SymReaper.maybeDead(SymR->getSymbol()); 2582 2583 // Mark all non-live symbols that this binding references as dead. 2584 const ClusterBindings &Cluster = I.getData(); 2585 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2586 CI != CE; ++CI) { 2587 SVal X = CI.getData(); 2588 SymExpr::symbol_iterator SI = X.symbol_begin(), SE = X.symbol_end(); 2589 for (; SI != SE; ++SI) 2590 SymReaper.maybeDead(*SI); 2591 } 2592 } 2593 2594 return StoreRef(B.asStore(), *this); 2595 } 2596 2597 //===----------------------------------------------------------------------===// 2598 // Utility methods. 2599 //===----------------------------------------------------------------------===// 2600 2601 void RegionStoreManager::print(Store store, raw_ostream &OS, 2602 const char* nl) { 2603 RegionBindingsRef B = getRegionBindings(store); 2604 OS << "Store (direct and default bindings), " 2605 << B.asStore() 2606 << " :" << nl; 2607 B.dump(OS, nl); 2608 } 2609