1 //== RegionStore.cpp - Field-sensitive store model --------------*- C++ -*--==// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file defines a basic region store model. In this model, we do have field 11 // sensitivity. But we assume nothing about the heap shape. So recursive data 12 // structures are largely ignored. Basically we do 1-limiting analysis. 13 // Parameter pointers are assumed with no aliasing. Pointee objects of 14 // parameters are created lazily. 15 // 16 //===----------------------------------------------------------------------===// 17 18 #include "clang/AST/Attr.h" 19 #include "clang/AST/CharUnits.h" 20 #include "clang/ASTMatchers/ASTMatchFinder.h" 21 #include "clang/Analysis/Analyses/LiveVariables.h" 22 #include "clang/Analysis/AnalysisDeclContext.h" 23 #include "clang/Basic/TargetInfo.h" 24 #include "clang/StaticAnalyzer/Core/PathSensitive/AnalysisManager.h" 25 #include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h" 26 #include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h" 27 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h" 28 #include "clang/StaticAnalyzer/Core/PathSensitive/ProgramStateTrait.h" 29 #include "clang/StaticAnalyzer/Core/PathSensitive/SubEngine.h" 30 #include "llvm/ADT/ImmutableMap.h" 31 #include "llvm/ADT/Optional.h" 32 #include "llvm/Support/raw_ostream.h" 33 #include <utility> 34 35 using namespace clang; 36 using namespace ento; 37 38 //===----------------------------------------------------------------------===// 39 // Representation of binding keys. 40 //===----------------------------------------------------------------------===// 41 42 namespace { 43 class BindingKey { 44 public: 45 enum Kind { Default = 0x0, Direct = 0x1 }; 46 private: 47 enum { Symbolic = 0x2 }; 48 49 llvm::PointerIntPair<const MemRegion *, 2> P; 50 uint64_t Data; 51 52 /// Create a key for a binding to region \p r, which has a symbolic offset 53 /// from region \p Base. 54 explicit BindingKey(const SubRegion *r, const SubRegion *Base, Kind k) 55 : P(r, k | Symbolic), Data(reinterpret_cast<uintptr_t>(Base)) { 56 assert(r && Base && "Must have known regions."); 57 assert(getConcreteOffsetRegion() == Base && "Failed to store base region"); 58 } 59 60 /// Create a key for a binding at \p offset from base region \p r. 
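  /// For example (illustrative; exact numbers are target-dependent), with a
  /// 32-bit 'int', a direct binding to 'a[1]' in 'int a[3]' is keyed by the
  /// base region of 'a' at bit offset 32, since concrete offsets are measured
  /// in bits from the base region.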
61 explicit BindingKey(const MemRegion *r, uint64_t offset, Kind k) 62 : P(r, k), Data(offset) { 63 assert(r && "Must have known regions."); 64 assert(getOffset() == offset && "Failed to store offset"); 65 assert((r == r->getBaseRegion() || isa<ObjCIvarRegion>(r) || 66 isa <CXXDerivedObjectRegion>(r)) && 67 "Not a base"); 68 } 69 public: 70 71 bool isDirect() const { return P.getInt() & Direct; } 72 bool hasSymbolicOffset() const { return P.getInt() & Symbolic; } 73 74 const MemRegion *getRegion() const { return P.getPointer(); } 75 uint64_t getOffset() const { 76 assert(!hasSymbolicOffset()); 77 return Data; 78 } 79 80 const SubRegion *getConcreteOffsetRegion() const { 81 assert(hasSymbolicOffset()); 82 return reinterpret_cast<const SubRegion *>(static_cast<uintptr_t>(Data)); 83 } 84 85 const MemRegion *getBaseRegion() const { 86 if (hasSymbolicOffset()) 87 return getConcreteOffsetRegion()->getBaseRegion(); 88 return getRegion()->getBaseRegion(); 89 } 90 91 void Profile(llvm::FoldingSetNodeID& ID) const { 92 ID.AddPointer(P.getOpaqueValue()); 93 ID.AddInteger(Data); 94 } 95 96 static BindingKey Make(const MemRegion *R, Kind k); 97 98 bool operator<(const BindingKey &X) const { 99 if (P.getOpaqueValue() < X.P.getOpaqueValue()) 100 return true; 101 if (P.getOpaqueValue() > X.P.getOpaqueValue()) 102 return false; 103 return Data < X.Data; 104 } 105 106 bool operator==(const BindingKey &X) const { 107 return P.getOpaqueValue() == X.P.getOpaqueValue() && 108 Data == X.Data; 109 } 110 111 void dump() const; 112 }; 113 } // end anonymous namespace 114 115 BindingKey BindingKey::Make(const MemRegion *R, Kind k) { 116 const RegionOffset &RO = R->getAsOffset(); 117 if (RO.hasSymbolicOffset()) 118 return BindingKey(cast<SubRegion>(R), cast<SubRegion>(RO.getRegion()), k); 119 120 return BindingKey(RO.getRegion(), RO.getOffset(), k); 121 } 122 123 namespace llvm { 124 static inline 125 raw_ostream &operator<<(raw_ostream &os, BindingKey K) { 126 os << '(' << K.getRegion(); 127 if (!K.hasSymbolicOffset()) 128 os << ',' << K.getOffset(); 129 os << ',' << (K.isDirect() ? "direct" : "default") 130 << ')'; 131 return os; 132 } 133 134 template <typename T> struct isPodLike; 135 template <> struct isPodLike<BindingKey> { 136 static const bool value = true; 137 }; 138 } // end llvm namespace 139 140 #ifndef NDEBUG 141 LLVM_DUMP_METHOD void BindingKey::dump() const { llvm::errs() << *this; } 142 #endif 143 144 //===----------------------------------------------------------------------===// 145 // Actual Store type. 
146 //===----------------------------------------------------------------------===// 147 148 typedef llvm::ImmutableMap<BindingKey, SVal> ClusterBindings; 149 typedef llvm::ImmutableMapRef<BindingKey, SVal> ClusterBindingsRef; 150 typedef std::pair<BindingKey, SVal> BindingPair; 151 152 typedef llvm::ImmutableMap<const MemRegion *, ClusterBindings> 153 RegionBindings; 154 155 namespace { 156 class RegionBindingsRef : public llvm::ImmutableMapRef<const MemRegion *, 157 ClusterBindings> { 158 ClusterBindings::Factory *CBFactory; 159 160 public: 161 typedef llvm::ImmutableMapRef<const MemRegion *, ClusterBindings> 162 ParentTy; 163 164 RegionBindingsRef(ClusterBindings::Factory &CBFactory, 165 const RegionBindings::TreeTy *T, 166 RegionBindings::TreeTy::Factory *F) 167 : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(T, F), 168 CBFactory(&CBFactory) {} 169 170 RegionBindingsRef(const ParentTy &P, ClusterBindings::Factory &CBFactory) 171 : llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>(P), 172 CBFactory(&CBFactory) {} 173 174 RegionBindingsRef add(key_type_ref K, data_type_ref D) const { 175 return RegionBindingsRef(static_cast<const ParentTy *>(this)->add(K, D), 176 *CBFactory); 177 } 178 179 RegionBindingsRef remove(key_type_ref K) const { 180 return RegionBindingsRef(static_cast<const ParentTy *>(this)->remove(K), 181 *CBFactory); 182 } 183 184 RegionBindingsRef addBinding(BindingKey K, SVal V) const; 185 186 RegionBindingsRef addBinding(const MemRegion *R, 187 BindingKey::Kind k, SVal V) const; 188 189 const SVal *lookup(BindingKey K) const; 190 const SVal *lookup(const MemRegion *R, BindingKey::Kind k) const; 191 using llvm::ImmutableMapRef<const MemRegion *, ClusterBindings>::lookup; 192 193 RegionBindingsRef removeBinding(BindingKey K); 194 195 RegionBindingsRef removeBinding(const MemRegion *R, 196 BindingKey::Kind k); 197 198 RegionBindingsRef removeBinding(const MemRegion *R) { 199 return removeBinding(R, BindingKey::Direct). 200 removeBinding(R, BindingKey::Default); 201 } 202 203 Optional<SVal> getDirectBinding(const MemRegion *R) const; 204 205 /// getDefaultBinding - Returns an SVal* representing an optional default 206 /// binding associated with a region and its subregions. 207 Optional<SVal> getDefaultBinding(const MemRegion *R) const; 208 209 /// Return the internal tree as a Store. 210 Store asStore() const { 211 return asImmutableMap().getRootWithoutRetain(); 212 } 213 214 void dump(raw_ostream &OS, const char *nl) const { 215 for (iterator I = begin(), E = end(); I != E; ++I) { 216 const ClusterBindings &Cluster = I.getData(); 217 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 218 CI != CE; ++CI) { 219 OS << ' ' << CI.getKey() << " : " << CI.getData() << nl; 220 } 221 OS << nl; 222 } 223 } 224 225 LLVM_DUMP_METHOD void dump() const { dump(llvm::errs(), "\n"); } 226 }; 227 } // end anonymous namespace 228 229 typedef const RegionBindingsRef& RegionBindingsConstRef; 230 231 Optional<SVal> RegionBindingsRef::getDirectBinding(const MemRegion *R) const { 232 return Optional<SVal>::create(lookup(R, BindingKey::Direct)); 233 } 234 235 Optional<SVal> RegionBindingsRef::getDefaultBinding(const MemRegion *R) const { 236 return Optional<SVal>::create(lookup(R, BindingKey::Default)); 237 } 238 239 RegionBindingsRef RegionBindingsRef::addBinding(BindingKey K, SVal V) const { 240 const MemRegion *Base = K.getBaseRegion(); 241 242 const ClusterBindings *ExistingCluster = lookup(Base); 243 ClusterBindings Cluster = 244 (ExistingCluster ? 
*ExistingCluster : CBFactory->getEmptyMap()); 245 246 ClusterBindings NewCluster = CBFactory->add(Cluster, K, V); 247 return add(Base, NewCluster); 248 } 249 250 251 RegionBindingsRef RegionBindingsRef::addBinding(const MemRegion *R, 252 BindingKey::Kind k, 253 SVal V) const { 254 return addBinding(BindingKey::Make(R, k), V); 255 } 256 257 const SVal *RegionBindingsRef::lookup(BindingKey K) const { 258 const ClusterBindings *Cluster = lookup(K.getBaseRegion()); 259 if (!Cluster) 260 return nullptr; 261 return Cluster->lookup(K); 262 } 263 264 const SVal *RegionBindingsRef::lookup(const MemRegion *R, 265 BindingKey::Kind k) const { 266 return lookup(BindingKey::Make(R, k)); 267 } 268 269 RegionBindingsRef RegionBindingsRef::removeBinding(BindingKey K) { 270 const MemRegion *Base = K.getBaseRegion(); 271 const ClusterBindings *Cluster = lookup(Base); 272 if (!Cluster) 273 return *this; 274 275 ClusterBindings NewCluster = CBFactory->remove(*Cluster, K); 276 if (NewCluster.isEmpty()) 277 return remove(Base); 278 return add(Base, NewCluster); 279 } 280 281 RegionBindingsRef RegionBindingsRef::removeBinding(const MemRegion *R, 282 BindingKey::Kind k){ 283 return removeBinding(BindingKey::Make(R, k)); 284 } 285 286 //===----------------------------------------------------------------------===// 287 // Fine-grained control of RegionStoreManager. 288 //===----------------------------------------------------------------------===// 289 290 namespace { 291 struct minimal_features_tag {}; 292 struct maximal_features_tag {}; 293 294 class RegionStoreFeatures { 295 bool SupportsFields; 296 public: 297 RegionStoreFeatures(minimal_features_tag) : 298 SupportsFields(false) {} 299 300 RegionStoreFeatures(maximal_features_tag) : 301 SupportsFields(true) {} 302 303 void enableFields(bool t) { SupportsFields = t; } 304 305 bool supportsFields() const { return SupportsFields; } 306 }; 307 } 308 309 //===----------------------------------------------------------------------===// 310 // Main RegionStore logic. 311 //===----------------------------------------------------------------------===// 312 313 namespace { 314 class InvalidateRegionsWorker; 315 316 class RegionStoreManager : public StoreManager { 317 public: 318 const RegionStoreFeatures Features; 319 320 RegionBindings::Factory RBFactory; 321 mutable ClusterBindings::Factory CBFactory; 322 323 typedef std::vector<SVal> SValListTy; 324 private: 325 typedef llvm::DenseMap<const LazyCompoundValData *, 326 SValListTy> LazyBindingsMapTy; 327 LazyBindingsMapTy LazyBindingsMap; 328 329 /// The largest number of fields a struct can have and still be 330 /// considered "small". 331 /// 332 /// This is currently used to decide whether or not it is worth "forcing" a 333 /// LazyCompoundVal on bind. 334 /// 335 /// This is controlled by 'region-store-small-struct-limit' option. 336 /// To disable all small-struct-dependent behavior, set the option to "0". 337 unsigned SmallStructLimit; 338 339 /// A helper used to populate the work list with the given set of 340 /// regions. 
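  /// Values that wrap a LazyCompoundVal are expanded into their interesting
  /// constituent values (see getInterestingValues) before their regions are
  /// added to the work list.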
341 void populateWorkList(InvalidateRegionsWorker &W, 342 ArrayRef<SVal> Values, 343 InvalidatedRegions *TopLevelRegions); 344 345 public: 346 RegionStoreManager(ProgramStateManager& mgr, const RegionStoreFeatures &f) 347 : StoreManager(mgr), Features(f), 348 RBFactory(mgr.getAllocator()), CBFactory(mgr.getAllocator()), 349 SmallStructLimit(0) { 350 if (SubEngine *Eng = StateMgr.getOwningEngine()) { 351 AnalyzerOptions &Options = Eng->getAnalysisManager().options; 352 SmallStructLimit = 353 Options.getOptionAsInteger("region-store-small-struct-limit", 2); 354 } 355 } 356 357 358 /// setImplicitDefaultValue - Set the default binding for the provided 359 /// MemRegion to the value implicitly defined for compound literals when 360 /// the value is not specified. 361 RegionBindingsRef setImplicitDefaultValue(RegionBindingsConstRef B, 362 const MemRegion *R, QualType T); 363 364 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 365 /// type. 'Array' represents the lvalue of the array being decayed 366 /// to a pointer, and the returned SVal represents the decayed 367 /// version of that lvalue (i.e., a pointer to the first element of 368 /// the array). This is called by ExprEngine when evaluating 369 /// casts from arrays to pointers. 370 SVal ArrayToPointer(Loc Array, QualType ElementTy) override; 371 372 StoreRef getInitialStore(const LocationContext *InitLoc) override { 373 return StoreRef(RBFactory.getEmptyMap().getRootWithoutRetain(), *this); 374 } 375 376 //===-------------------------------------------------------------------===// 377 // Binding values to regions. 378 //===-------------------------------------------------------------------===// 379 RegionBindingsRef invalidateGlobalRegion(MemRegion::Kind K, 380 const Expr *Ex, 381 unsigned Count, 382 const LocationContext *LCtx, 383 RegionBindingsRef B, 384 InvalidatedRegions *Invalidated); 385 386 StoreRef invalidateRegions(Store store, 387 ArrayRef<SVal> Values, 388 const Expr *E, unsigned Count, 389 const LocationContext *LCtx, 390 const CallEvent *Call, 391 InvalidatedSymbols &IS, 392 RegionAndSymbolInvalidationTraits &ITraits, 393 InvalidatedRegions *Invalidated, 394 InvalidatedRegions *InvalidatedTopLevel) override; 395 396 bool scanReachableSymbols(Store S, const MemRegion *R, 397 ScanReachableSymbols &Callbacks) override; 398 399 RegionBindingsRef removeSubRegionBindings(RegionBindingsConstRef B, 400 const SubRegion *R); 401 402 public: // Part of public interface to class. 403 404 StoreRef Bind(Store store, Loc LV, SVal V) override { 405 return StoreRef(bind(getRegionBindings(store), LV, V).asStore(), *this); 406 } 407 408 RegionBindingsRef bind(RegionBindingsConstRef B, Loc LV, SVal V); 409 410 // BindDefaultInitial is only used to initialize a region with 411 // a default value. 412 StoreRef BindDefaultInitial(Store store, const MemRegion *R, 413 SVal V) override { 414 RegionBindingsRef B = getRegionBindings(store); 415 // Use other APIs when you have to wipe the region that was initialized 416 // earlier. 417 assert(!(B.getDefaultBinding(R) || B.getDirectBinding(R)) && 418 "Double initialization!"); 419 B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V); 420 return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this); 421 } 422 423 // BindDefaultZero is used for zeroing constructors that may accidentally 424 // overwrite existing bindings. 
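  // (Think of value-initialization, e.g. 'new T()', where the object is
  // zero-filled before any constructor body runs.)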
425 StoreRef BindDefaultZero(Store store, const MemRegion *R) override { 426 // FIXME: The offsets of empty bases can be tricky because 427 // of the so-called "empty base class optimization". 428 // If a base class has been optimized out 429 // we should not try to create a binding, otherwise we should. 430 // Unfortunately, at the moment ASTRecordLayout doesn't expose 431 // the actual sizes of the empty bases 432 // and trying to infer them from offsets/alignments 433 // seems to be error-prone and non-trivial because of the trailing padding. 434 // As a temporary mitigation we don't create bindings for empty bases. 435 if (const auto *BR = dyn_cast<CXXBaseObjectRegion>(R)) 436 if (BR->getDecl()->isEmpty()) 437 return StoreRef(store, *this); 438 439 RegionBindingsRef B = getRegionBindings(store); 440 SVal V = svalBuilder.makeZeroVal(Ctx.CharTy); 441 B = removeSubRegionBindings(B, cast<SubRegion>(R)); 442 B = B.addBinding(BindingKey::Make(R, BindingKey::Default), V); 443 return StoreRef(B.asImmutableMap().getRootWithoutRetain(), *this); 444 } 445 446 /// Attempt to extract the fields of \p LCV and bind them to the struct region 447 /// \p R. 448 /// 449 /// This path is used when it seems advantageous to "force" loading the values 450 /// within a LazyCompoundVal to bind memberwise to the struct region, rather 451 /// than using a Default binding at the base of the entire region. This is a 452 /// heuristic attempting to avoid building long chains of LazyCompoundVals. 453 /// 454 /// \returns The updated store bindings, or \c None if binding non-lazily 455 /// would be too expensive. 456 Optional<RegionBindingsRef> tryBindSmallStruct(RegionBindingsConstRef B, 457 const TypedValueRegion *R, 458 const RecordDecl *RD, 459 nonloc::LazyCompoundVal LCV); 460 461 /// BindStruct - Bind a compound value to a structure. 462 RegionBindingsRef bindStruct(RegionBindingsConstRef B, 463 const TypedValueRegion* R, SVal V); 464 465 /// BindVector - Bind a compound value to a vector. 466 RegionBindingsRef bindVector(RegionBindingsConstRef B, 467 const TypedValueRegion* R, SVal V); 468 469 RegionBindingsRef bindArray(RegionBindingsConstRef B, 470 const TypedValueRegion* R, 471 SVal V); 472 473 /// Clears out all bindings in the given region and assigns a new value 474 /// as a Default binding. 475 RegionBindingsRef bindAggregate(RegionBindingsConstRef B, 476 const TypedRegion *R, 477 SVal DefaultVal); 478 479 /// Create a new store with the specified binding removed. 480 /// \param ST the original store, that is the basis for the new store. 481 /// \param L the location whose binding should be removed. 482 StoreRef killBinding(Store ST, Loc L) override; 483 484 void incrementReferenceCount(Store store) override { 485 getRegionBindings(store).manualRetain(); 486 } 487 488 /// If the StoreManager supports it, decrement the reference count of 489 /// the specified Store object. If the reference count hits 0, the memory 490 /// associated with the object is recycled. 491 void decrementReferenceCount(Store store) override { 492 getRegionBindings(store).manualRelease(); 493 } 494 495 bool includedInBindings(Store store, const MemRegion *region) const override; 496 497 /// Return the value bound to the specified location in a given state.
498 /// 499 /// The high level logic for this method is this: 500 /// getBinding (L) 501 /// if L has binding 502 /// return L's binding 503 /// else if L is in killset 504 /// return unknown 505 /// else 506 /// if L is on stack or heap 507 /// return undefined 508 /// else 509 /// return symbolic 510 SVal getBinding(Store S, Loc L, QualType T) override { 511 return getBinding(getRegionBindings(S), L, T); 512 } 513 514 Optional<SVal> getDefaultBinding(Store S, const MemRegion *R) override { 515 RegionBindingsRef B = getRegionBindings(S); 516 // Default bindings are always applied over a base region so look up the 517 // base region's default binding, otherwise the lookup will fail when R 518 // is at an offset from R->getBaseRegion(). 519 return B.getDefaultBinding(R->getBaseRegion()); 520 } 521 522 SVal getBinding(RegionBindingsConstRef B, Loc L, QualType T = QualType()); 523 524 SVal getBindingForElement(RegionBindingsConstRef B, const ElementRegion *R); 525 526 SVal getBindingForField(RegionBindingsConstRef B, const FieldRegion *R); 527 528 SVal getBindingForObjCIvar(RegionBindingsConstRef B, const ObjCIvarRegion *R); 529 530 SVal getBindingForVar(RegionBindingsConstRef B, const VarRegion *R); 531 532 SVal getBindingForLazySymbol(const TypedValueRegion *R); 533 534 SVal getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 535 const TypedValueRegion *R, 536 QualType Ty); 537 538 SVal getLazyBinding(const SubRegion *LazyBindingRegion, 539 RegionBindingsRef LazyBinding); 540 541 /// Get bindings for the values in a struct and return a CompoundVal, used 542 /// when doing struct copy: 543 /// struct s x, y; 544 /// x = y; 545 /// y's value is retrieved by this method. 546 SVal getBindingForStruct(RegionBindingsConstRef B, const TypedValueRegion *R); 547 SVal getBindingForArray(RegionBindingsConstRef B, const TypedValueRegion *R); 548 NonLoc createLazyBinding(RegionBindingsConstRef B, const TypedValueRegion *R); 549 550 /// Used to lazily generate derived symbols for bindings that are defined 551 /// implicitly by default bindings in a super region. 552 /// 553 /// Note that callers may need to specially handle LazyCompoundVals, which 554 /// are returned as is in case the caller needs to treat them differently. 555 Optional<SVal> getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 556 const MemRegion *superR, 557 const TypedValueRegion *R, 558 QualType Ty); 559 560 /// Get the store and region whose binding this region \p R corresponds to. 561 /// 562 /// If there is no lazy binding for \p R, the returned value will have a null 563 /// \c second. Note that a null pointer can represent a valid Store. 564 std::pair<Store, const SubRegion *> 565 findLazyBinding(RegionBindingsConstRef B, const SubRegion *R, 566 const SubRegion *originalRegion); 567 568 /// Returns the cached set of interesting SVals contained within a lazy 569 /// binding. 570 /// 571 /// The precise value of "interesting" is determined for the purposes of 572 /// RegionStore's internal analysis. It must always contain all regions and 573 /// symbols, but may omit constants and other kinds of SVal. 574 const SValListTy &getInterestingValues(nonloc::LazyCompoundVal LCV); 575 576 //===------------------------------------------------------------------===// 577 // State pruning. 578 //===------------------------------------------------------------------===// 579 580 /// removeDeadBindings - Scans the RegionStore of 'state' for dead values. 581 /// It returns a new Store with these values removed.
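  /// Roughly speaking, a binding is dead when its region is no longer
  /// reachable from the live symbols and regions tracked by \p SymReaper.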
582 StoreRef removeDeadBindings(Store store, const StackFrameContext *LCtx, 583 SymbolReaper& SymReaper) override; 584 585 //===------------------------------------------------------------------===// 586 // Region "extents". 587 //===------------------------------------------------------------------===// 588 589 // FIXME: This method will soon be eliminated; see the note in Store.h. 590 DefinedOrUnknownSVal getSizeInElements(ProgramStateRef state, 591 const MemRegion* R, 592 QualType EleTy) override; 593 594 //===------------------------------------------------------------------===// 595 // Utility methods. 596 //===------------------------------------------------------------------===// 597 598 RegionBindingsRef getRegionBindings(Store store) const { 599 return RegionBindingsRef(CBFactory, 600 static_cast<const RegionBindings::TreeTy*>(store), 601 RBFactory.getTreeFactory()); 602 } 603 604 void print(Store store, raw_ostream &Out, const char* nl, 605 const char *sep) override; 606 607 void iterBindings(Store store, BindingsHandler& f) override { 608 RegionBindingsRef B = getRegionBindings(store); 609 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 610 const ClusterBindings &Cluster = I.getData(); 611 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 612 CI != CE; ++CI) { 613 const BindingKey &K = CI.getKey(); 614 if (!K.isDirect()) 615 continue; 616 if (const SubRegion *R = dyn_cast<SubRegion>(K.getRegion())) { 617 // FIXME: Possibly incorporate the offset? 618 if (!f.HandleBinding(*this, store, R, CI.getData())) 619 return; 620 } 621 } 622 } 623 } 624 }; 625 626 } // end anonymous namespace 627 628 //===----------------------------------------------------------------------===// 629 // RegionStore creation. 630 //===----------------------------------------------------------------------===// 631 632 std::unique_ptr<StoreManager> 633 ento::CreateRegionStoreManager(ProgramStateManager &StMgr) { 634 RegionStoreFeatures F = maximal_features_tag(); 635 return llvm::make_unique<RegionStoreManager>(StMgr, F); 636 } 637 638 std::unique_ptr<StoreManager> 639 ento::CreateFieldsOnlyRegionStoreManager(ProgramStateManager &StMgr) { 640 RegionStoreFeatures F = minimal_features_tag(); 641 F.enableFields(true); 642 return llvm::make_unique<RegionStoreManager>(StMgr, F); 643 } 644 645 646 //===----------------------------------------------------------------------===// 647 // Region Cluster analysis. 648 //===----------------------------------------------------------------------===// 649 650 namespace { 651 /// Used to determine which global regions are automatically included in the 652 /// initial worklist of a ClusterAnalysis. 653 enum GlobalsFilterKind { 654 /// Don't include any global regions. 655 GFK_None, 656 /// Only include system globals. 657 GFK_SystemOnly, 658 /// Include all global regions. 
659 GFK_All 660 }; 661 662 template <typename DERIVED> 663 class ClusterAnalysis { 664 protected: 665 typedef llvm::DenseMap<const MemRegion *, const ClusterBindings *> ClusterMap; 666 typedef const MemRegion * WorkListElement; 667 typedef SmallVector<WorkListElement, 10> WorkList; 668 669 llvm::SmallPtrSet<const ClusterBindings *, 16> Visited; 670 671 WorkList WL; 672 673 RegionStoreManager &RM; 674 ASTContext &Ctx; 675 SValBuilder &svalBuilder; 676 677 RegionBindingsRef B; 678 679 680 protected: 681 const ClusterBindings *getCluster(const MemRegion *R) { 682 return B.lookup(R); 683 } 684 685 /// Returns true if all clusters in the given memspace should be initially 686 /// included in the cluster analysis. Subclasses may provide their 687 /// own implementation. 688 bool includeEntireMemorySpace(const MemRegion *Base) { 689 return false; 690 } 691 692 public: 693 ClusterAnalysis(RegionStoreManager &rm, ProgramStateManager &StateMgr, 694 RegionBindingsRef b) 695 : RM(rm), Ctx(StateMgr.getContext()), 696 svalBuilder(StateMgr.getSValBuilder()), B(std::move(b)) {} 697 698 RegionBindingsRef getRegionBindings() const { return B; } 699 700 bool isVisited(const MemRegion *R) { 701 return Visited.count(getCluster(R)); 702 } 703 704 void GenerateClusters() { 705 // Scan the entire set of bindings and record the region clusters. 706 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); 707 RI != RE; ++RI){ 708 const MemRegion *Base = RI.getKey(); 709 710 const ClusterBindings &Cluster = RI.getData(); 711 assert(!Cluster.isEmpty() && "Empty clusters should be removed"); 712 static_cast<DERIVED*>(this)->VisitAddedToCluster(Base, Cluster); 713 714 // If the base's memspace should be entirely invalidated, add the cluster 715 // to the workspace up front. 716 if (static_cast<DERIVED*>(this)->includeEntireMemorySpace(Base)) 717 AddToWorkList(WorkListElement(Base), &Cluster); 718 } 719 } 720 721 bool AddToWorkList(WorkListElement E, const ClusterBindings *C) { 722 if (C && !Visited.insert(C).second) 723 return false; 724 WL.push_back(E); 725 return true; 726 } 727 728 bool AddToWorkList(const MemRegion *R) { 729 return static_cast<DERIVED*>(this)->AddToWorkList(R); 730 } 731 732 void RunWorkList() { 733 while (!WL.empty()) { 734 WorkListElement E = WL.pop_back_val(); 735 const MemRegion *BaseR = E; 736 737 static_cast<DERIVED*>(this)->VisitCluster(BaseR, getCluster(BaseR)); 738 } 739 } 740 741 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C) {} 742 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C) {} 743 744 void VisitCluster(const MemRegion *BaseR, const ClusterBindings *C, 745 bool Flag) { 746 static_cast<DERIVED*>(this)->VisitCluster(BaseR, C); 747 } 748 }; 749 } 750 751 //===----------------------------------------------------------------------===// 752 // Binding invalidation. 
753 //===----------------------------------------------------------------------===// 754 755 bool RegionStoreManager::scanReachableSymbols(Store S, const MemRegion *R, 756 ScanReachableSymbols &Callbacks) { 757 assert(R == R->getBaseRegion() && "Should only be called for base regions"); 758 RegionBindingsRef B = getRegionBindings(S); 759 const ClusterBindings *Cluster = B.lookup(R); 760 761 if (!Cluster) 762 return true; 763 764 for (ClusterBindings::iterator RI = Cluster->begin(), RE = Cluster->end(); 765 RI != RE; ++RI) { 766 if (!Callbacks.scan(RI.getData())) 767 return false; 768 } 769 770 return true; 771 } 772 773 static inline bool isUnionField(const FieldRegion *FR) { 774 return FR->getDecl()->getParent()->isUnion(); 775 } 776 777 typedef SmallVector<const FieldDecl *, 8> FieldVector; 778 779 static void getSymbolicOffsetFields(BindingKey K, FieldVector &Fields) { 780 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 781 782 const MemRegion *Base = K.getConcreteOffsetRegion(); 783 const MemRegion *R = K.getRegion(); 784 785 while (R != Base) { 786 if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) 787 if (!isUnionField(FR)) 788 Fields.push_back(FR->getDecl()); 789 790 R = cast<SubRegion>(R)->getSuperRegion(); 791 } 792 } 793 794 static bool isCompatibleWithFields(BindingKey K, const FieldVector &Fields) { 795 assert(K.hasSymbolicOffset() && "Not implemented for concrete offset keys"); 796 797 if (Fields.empty()) 798 return true; 799 800 FieldVector FieldsInBindingKey; 801 getSymbolicOffsetFields(K, FieldsInBindingKey); 802 803 ptrdiff_t Delta = FieldsInBindingKey.size() - Fields.size(); 804 if (Delta >= 0) 805 return std::equal(FieldsInBindingKey.begin() + Delta, 806 FieldsInBindingKey.end(), 807 Fields.begin()); 808 else 809 return std::equal(FieldsInBindingKey.begin(), FieldsInBindingKey.end(), 810 Fields.begin() - Delta); 811 } 812 813 /// Collects all bindings in \p Cluster that may refer to bindings within 814 /// \p Top. 815 /// 816 /// Each binding is a pair whose \c first is the key (a BindingKey) and whose 817 /// \c second is the value (an SVal). 818 /// 819 /// The \p IncludeAllDefaultBindings parameter specifies whether to include 820 /// default bindings that may extend beyond \p Top itself, e.g. if \p Top is 821 /// an aggregate within a larger aggregate with a default binding. 822 static void 823 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 824 SValBuilder &SVB, const ClusterBindings &Cluster, 825 const SubRegion *Top, BindingKey TopKey, 826 bool IncludeAllDefaultBindings) { 827 FieldVector FieldsInSymbolicSubregions; 828 if (TopKey.hasSymbolicOffset()) { 829 getSymbolicOffsetFields(TopKey, FieldsInSymbolicSubregions); 830 Top = TopKey.getConcreteOffsetRegion(); 831 TopKey = BindingKey::Make(Top, BindingKey::Default); 832 } 833 834 // Find the length (in bits) of the region being invalidated. 835 uint64_t Length = UINT64_MAX; 836 SVal Extent = Top->getExtent(SVB); 837 if (Optional<nonloc::ConcreteInt> ExtentCI = 838 Extent.getAs<nonloc::ConcreteInt>()) { 839 const llvm::APSInt &ExtentInt = ExtentCI->getValue(); 840 assert(ExtentInt.isNonNegative() || ExtentInt.isUnsigned()); 841 // Extents are in bytes but region offsets are in bits. Be careful! 
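    // For example, a 4-byte extent with 8-bit chars gives a Length of 32 bits.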
842 Length = ExtentInt.getLimitedValue() * SVB.getContext().getCharWidth(); 843 } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(Top)) { 844 if (FR->getDecl()->isBitField()) 845 Length = FR->getDecl()->getBitWidthValue(SVB.getContext()); 846 } 847 848 for (ClusterBindings::iterator I = Cluster.begin(), E = Cluster.end(); 849 I != E; ++I) { 850 BindingKey NextKey = I.getKey(); 851 if (NextKey.getRegion() == TopKey.getRegion()) { 852 // FIXME: This doesn't catch the case where we're really invalidating a 853 // region with a symbolic offset. Example: 854 // R: points[i].y 855 // Next: points[0].x 856 857 if (NextKey.getOffset() > TopKey.getOffset() && 858 NextKey.getOffset() - TopKey.getOffset() < Length) { 859 // Case 1: The next binding is inside the region we're invalidating. 860 // Include it. 861 Bindings.push_back(*I); 862 863 } else if (NextKey.getOffset() == TopKey.getOffset()) { 864 // Case 2: The next binding is at the same offset as the region we're 865 // invalidating. In this case, we need to leave default bindings alone, 866 // since they may be providing a default value for regions beyond what 867 // we're invalidating. 868 // FIXME: This is probably incorrect; consider invalidating an outer 869 // struct whose first field is bound to a LazyCompoundVal. 870 if (IncludeAllDefaultBindings || NextKey.isDirect()) 871 Bindings.push_back(*I); 872 } 873 874 } else if (NextKey.hasSymbolicOffset()) { 875 const MemRegion *Base = NextKey.getConcreteOffsetRegion(); 876 if (Top->isSubRegionOf(Base) && Top != Base) { 877 // Case 3: The next key is symbolic and we just changed something within 878 // its concrete region. We don't know if the binding is still valid, so 879 // we'll be conservative and include it. 880 if (IncludeAllDefaultBindings || NextKey.isDirect()) 881 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 882 Bindings.push_back(*I); 883 } else if (const SubRegion *BaseSR = dyn_cast<SubRegion>(Base)) { 884 // Case 4: The next key is symbolic, but we changed a known 885 // super-region. In this case the binding is certainly included. 886 if (BaseSR->isSubRegionOf(Top)) 887 if (isCompatibleWithFields(NextKey, FieldsInSymbolicSubregions)) 888 Bindings.push_back(*I); 889 } 890 } 891 } 892 } 893 894 static void 895 collectSubRegionBindings(SmallVectorImpl<BindingPair> &Bindings, 896 SValBuilder &SVB, const ClusterBindings &Cluster, 897 const SubRegion *Top, bool IncludeAllDefaultBindings) { 898 collectSubRegionBindings(Bindings, SVB, Cluster, Top, 899 BindingKey::Make(Top, BindingKey::Default), 900 IncludeAllDefaultBindings); 901 } 902 903 RegionBindingsRef 904 RegionStoreManager::removeSubRegionBindings(RegionBindingsConstRef B, 905 const SubRegion *Top) { 906 BindingKey TopKey = BindingKey::Make(Top, BindingKey::Default); 907 const MemRegion *ClusterHead = TopKey.getBaseRegion(); 908 909 if (Top == ClusterHead) { 910 // We can remove an entire cluster's bindings all in one go. 911 return B.remove(Top); 912 } 913 914 const ClusterBindings *Cluster = B.lookup(ClusterHead); 915 if (!Cluster) { 916 // If we're invalidating a region with a symbolic offset, we need to make 917 // sure we don't treat the base region as uninitialized anymore.
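    // We do that by giving the concrete offset region a Default binding of
    // Unknown.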
918 if (TopKey.hasSymbolicOffset()) { 919 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 920 return B.addBinding(Concrete, BindingKey::Default, UnknownVal()); 921 } 922 return B; 923 } 924 925 SmallVector<BindingPair, 32> Bindings; 926 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, Top, TopKey, 927 /*IncludeAllDefaultBindings=*/false); 928 929 ClusterBindingsRef Result(*Cluster, CBFactory); 930 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 931 E = Bindings.end(); 932 I != E; ++I) 933 Result = Result.remove(I->first); 934 935 // If we're invalidating a region with a symbolic offset, we need to make sure 936 // we don't treat the base region as uninitialized anymore. 937 // FIXME: This isn't very precise; see the example in 938 // collectSubRegionBindings. 939 if (TopKey.hasSymbolicOffset()) { 940 const SubRegion *Concrete = TopKey.getConcreteOffsetRegion(); 941 Result = Result.add(BindingKey::Make(Concrete, BindingKey::Default), 942 UnknownVal()); 943 } 944 945 if (Result.isEmpty()) 946 return B.remove(ClusterHead); 947 return B.add(ClusterHead, Result.asImmutableMap()); 948 } 949 950 namespace { 951 class InvalidateRegionsWorker : public ClusterAnalysis<InvalidateRegionsWorker> 952 { 953 const Expr *Ex; 954 unsigned Count; 955 const LocationContext *LCtx; 956 InvalidatedSymbols &IS; 957 RegionAndSymbolInvalidationTraits &ITraits; 958 StoreManager::InvalidatedRegions *Regions; 959 GlobalsFilterKind GlobalsFilter; 960 public: 961 InvalidateRegionsWorker(RegionStoreManager &rm, 962 ProgramStateManager &stateMgr, 963 RegionBindingsRef b, 964 const Expr *ex, unsigned count, 965 const LocationContext *lctx, 966 InvalidatedSymbols &is, 967 RegionAndSymbolInvalidationTraits &ITraitsIn, 968 StoreManager::InvalidatedRegions *r, 969 GlobalsFilterKind GFK) 970 : ClusterAnalysis<InvalidateRegionsWorker>(rm, stateMgr, b), 971 Ex(ex), Count(count), LCtx(lctx), IS(is), ITraits(ITraitsIn), Regions(r), 972 GlobalsFilter(GFK) {} 973 974 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 975 void VisitBinding(SVal V); 976 977 using ClusterAnalysis::AddToWorkList; 978 979 bool AddToWorkList(const MemRegion *R); 980 981 /// Returns true if all clusters in the memory space for \p Base should 982 /// be invalidated. 983 bool includeEntireMemorySpace(const MemRegion *Base); 984 985 /// Returns true if the memory space of the given region is one of the global 986 /// regions specially included at the start of invalidation. 987 bool isInitiallyIncludedGlobalRegion(const MemRegion *R); 988 }; 989 } 990 991 bool InvalidateRegionsWorker::AddToWorkList(const MemRegion *R) { 992 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 993 R, RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 994 const MemRegion *BaseR = doNotInvalidateSuperRegion ? R : R->getBaseRegion(); 995 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 996 } 997 998 void InvalidateRegionsWorker::VisitBinding(SVal V) { 999 // A symbol? Mark it touched by the invalidation. 1000 if (SymbolRef Sym = V.getAsSymbol()) 1001 IS.insert(Sym); 1002 1003 if (const MemRegion *R = V.getAsRegion()) { 1004 AddToWorkList(R); 1005 return; 1006 } 1007 1008 // Is it a LazyCompoundVal? All references get invalidated as well.
1009 if (Optional<nonloc::LazyCompoundVal> LCS = 1010 V.getAs<nonloc::LazyCompoundVal>()) { 1011 1012 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 1013 1014 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 1015 E = Vals.end(); 1016 I != E; ++I) 1017 VisitBinding(*I); 1018 1019 return; 1020 } 1021 } 1022 1023 void InvalidateRegionsWorker::VisitCluster(const MemRegion *baseR, 1024 const ClusterBindings *C) { 1025 1026 bool PreserveRegionsContents = 1027 ITraits.hasTrait(baseR, 1028 RegionAndSymbolInvalidationTraits::TK_PreserveContents); 1029 1030 if (C) { 1031 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) 1032 VisitBinding(I.getData()); 1033 1034 // Invalidate regions contents. 1035 if (!PreserveRegionsContents) 1036 B = B.remove(baseR); 1037 } 1038 1039 if (const auto *TO = dyn_cast<TypedValueRegion>(baseR)) { 1040 if (const auto *RD = TO->getValueType()->getAsCXXRecordDecl()) { 1041 1042 // Lambdas can affect all static local variables without explicitly 1043 // capturing those. 1044 // We invalidate all static locals referenced inside the lambda body. 1045 if (RD->isLambda() && RD->getLambdaCallOperator()->getBody()) { 1046 using namespace ast_matchers; 1047 1048 const char *DeclBind = "DeclBind"; 1049 StatementMatcher RefToStatic = stmt(hasDescendant(declRefExpr( 1050 to(varDecl(hasStaticStorageDuration()).bind(DeclBind))))); 1051 auto Matches = 1052 match(RefToStatic, *RD->getLambdaCallOperator()->getBody(), 1053 RD->getASTContext()); 1054 1055 for (BoundNodes &Match : Matches) { 1056 auto *VD = Match.getNodeAs<VarDecl>(DeclBind); 1057 const VarRegion *ToInvalidate = 1058 RM.getRegionManager().getVarRegion(VD, LCtx); 1059 AddToWorkList(ToInvalidate); 1060 } 1061 } 1062 } 1063 } 1064 1065 // BlockDataRegion? If so, invalidate captured variables that are passed 1066 // by reference. 1067 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(baseR)) { 1068 for (BlockDataRegion::referenced_vars_iterator 1069 BI = BR->referenced_vars_begin(), BE = BR->referenced_vars_end() ; 1070 BI != BE; ++BI) { 1071 const VarRegion *VR = BI.getCapturedRegion(); 1072 const VarDecl *VD = VR->getDecl(); 1073 if (VD->hasAttr<BlocksAttr>() || !VD->hasLocalStorage()) { 1074 AddToWorkList(VR); 1075 } 1076 else if (Loc::isLocType(VR->getValueType())) { 1077 // Map the current bindings to a Store to retrieve the value 1078 // of the binding. If that binding itself is a region, we should 1079 // invalidate that region. This is because a block may capture 1080 // a pointer value, but the thing pointed by that pointer may 1081 // get invalidated. 1082 SVal V = RM.getBinding(B, loc::MemRegionVal(VR)); 1083 if (Optional<Loc> L = V.getAs<Loc>()) { 1084 if (const MemRegion *LR = L->getAsRegion()) 1085 AddToWorkList(LR); 1086 } 1087 } 1088 } 1089 return; 1090 } 1091 1092 // Symbolic region? 1093 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) 1094 IS.insert(SR->getSymbol()); 1095 1096 // Nothing else should be done in the case when we preserve regions context. 1097 if (PreserveRegionsContents) 1098 return; 1099 1100 // Otherwise, we have a normal data region. Record that we touched the region. 1101 if (Regions) 1102 Regions->push_back(baseR); 1103 1104 if (isa<AllocaRegion>(baseR) || isa<SymbolicRegion>(baseR)) { 1105 // Invalidate the region by setting its default value to 1106 // conjured symbol. The type of the symbol is irrelevant. 
1107 DefinedOrUnknownSVal V = 1108 svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, Ctx.IntTy, Count); 1109 B = B.addBinding(baseR, BindingKey::Default, V); 1110 return; 1111 } 1112 1113 if (!baseR->isBoundable()) 1114 return; 1115 1116 const TypedValueRegion *TR = cast<TypedValueRegion>(baseR); 1117 QualType T = TR->getValueType(); 1118 1119 if (isInitiallyIncludedGlobalRegion(baseR)) { 1120 // If the region is a global and we are invalidating all globals, 1121 // erasing the entry is good enough. This causes all globals to be lazily 1122 // symbolicated from the same base symbol. 1123 return; 1124 } 1125 1126 if (T->isRecordType()) { 1127 // Invalidate the region by setting its default value to 1128 // conjured symbol. The type of the symbol is irrelevant. 1129 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1130 Ctx.IntTy, Count); 1131 B = B.addBinding(baseR, BindingKey::Default, V); 1132 return; 1133 } 1134 1135 if (const ArrayType *AT = Ctx.getAsArrayType(T)) { 1136 bool doNotInvalidateSuperRegion = ITraits.hasTrait( 1137 baseR, 1138 RegionAndSymbolInvalidationTraits::TK_DoNotInvalidateSuperRegion); 1139 1140 if (doNotInvalidateSuperRegion) { 1141 // We are not doing blank invalidation of the whole array region, so we 1142 // have to manually invalidate each element. 1143 Optional<uint64_t> NumElements; 1144 1145 // Compute lower and upper offsets for region within array. 1146 if (const ConstantArrayType *CAT = dyn_cast<ConstantArrayType>(AT)) 1147 NumElements = CAT->getSize().getZExtValue(); 1148 if (!NumElements) // We are not dealing with a constant size array 1149 goto conjure_default; 1150 QualType ElementTy = AT->getElementType(); 1151 uint64_t ElemSize = Ctx.getTypeSize(ElementTy); 1152 const RegionOffset &RO = baseR->getAsOffset(); 1153 const MemRegion *SuperR = baseR->getBaseRegion(); 1154 if (RO.hasSymbolicOffset()) { 1155 // If the base region has a symbolic offset, 1156 // we revert to invalidating the super region. 1157 if (SuperR) 1158 AddToWorkList(SuperR); 1159 goto conjure_default; 1160 } 1161 1162 uint64_t LowerOffset = RO.getOffset(); 1163 uint64_t UpperOffset = LowerOffset + *NumElements * ElemSize; 1164 bool UpperOverflow = UpperOffset < LowerOffset; 1165 1166 // Invalidate regions which are within array boundaries, 1167 // or have a symbolic offset. 1168 if (!SuperR) 1169 goto conjure_default; 1170 1171 const ClusterBindings *C = B.lookup(SuperR); 1172 if (!C) 1173 goto conjure_default; 1174 1175 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; 1176 ++I) { 1177 const BindingKey &BK = I.getKey(); 1178 Optional<uint64_t> ROffset = 1179 BK.hasSymbolicOffset() ? Optional<uint64_t>() : BK.getOffset(); 1180 1181 // Check offset is not symbolic and within array's boundaries. 1182 // Handles arrays of 0 elements and of 0-sized elements as well. 1183 if (!ROffset || 1184 ((*ROffset >= LowerOffset && *ROffset < UpperOffset) || 1185 (UpperOverflow && 1186 (*ROffset >= LowerOffset || *ROffset < UpperOffset)) || 1187 (LowerOffset == UpperOffset && *ROffset == LowerOffset))) { 1188 B = B.removeBinding(I.getKey()); 1189 // Bound symbolic regions need to be invalidated for dead symbol 1190 // detection. 1191 SVal V = I.getData(); 1192 const MemRegion *R = V.getAsRegion(); 1193 if (R && isa<SymbolicRegion>(R)) 1194 VisitBinding(V); 1195 } 1196 } 1197 } 1198 conjure_default: 1199 // Set the default value of the array to conjured symbol.
1200 DefinedOrUnknownSVal V = 1201 svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1202 AT->getElementType(), Count); 1203 B = B.addBinding(baseR, BindingKey::Default, V); 1204 return; 1205 } 1206 1207 DefinedOrUnknownSVal V = svalBuilder.conjureSymbolVal(baseR, Ex, LCtx, 1208 T,Count); 1209 assert(SymbolManager::canSymbolicate(T) || V.isUnknown()); 1210 B = B.addBinding(baseR, BindingKey::Direct, V); 1211 } 1212 1213 bool InvalidateRegionsWorker::isInitiallyIncludedGlobalRegion( 1214 const MemRegion *R) { 1215 switch (GlobalsFilter) { 1216 case GFK_None: 1217 return false; 1218 case GFK_SystemOnly: 1219 return isa<GlobalSystemSpaceRegion>(R->getMemorySpace()); 1220 case GFK_All: 1221 return isa<NonStaticGlobalSpaceRegion>(R->getMemorySpace()); 1222 } 1223 1224 llvm_unreachable("unknown globals filter"); 1225 } 1226 1227 bool InvalidateRegionsWorker::includeEntireMemorySpace(const MemRegion *Base) { 1228 if (isInitiallyIncludedGlobalRegion(Base)) 1229 return true; 1230 1231 const MemSpaceRegion *MemSpace = Base->getMemorySpace(); 1232 return ITraits.hasTrait(MemSpace, 1233 RegionAndSymbolInvalidationTraits::TK_EntireMemSpace); 1234 } 1235 1236 RegionBindingsRef 1237 RegionStoreManager::invalidateGlobalRegion(MemRegion::Kind K, 1238 const Expr *Ex, 1239 unsigned Count, 1240 const LocationContext *LCtx, 1241 RegionBindingsRef B, 1242 InvalidatedRegions *Invalidated) { 1243 // Bind the globals memory space to a new symbol that we will use to derive 1244 // the bindings for all globals. 1245 const GlobalsSpaceRegion *GS = MRMgr.getGlobalsRegion(K); 1246 SVal V = svalBuilder.conjureSymbolVal(/* SymbolTag = */ (const void*) GS, Ex, LCtx, 1247 /* type does not matter */ Ctx.IntTy, 1248 Count); 1249 1250 B = B.removeBinding(GS) 1251 .addBinding(BindingKey::Make(GS, BindingKey::Default), V); 1252 1253 // Even if there are no bindings in the global scope, we still need to 1254 // record that we touched it. 1255 if (Invalidated) 1256 Invalidated->push_back(GS); 1257 1258 return B; 1259 } 1260 1261 void RegionStoreManager::populateWorkList(InvalidateRegionsWorker &W, 1262 ArrayRef<SVal> Values, 1263 InvalidatedRegions *TopLevelRegions) { 1264 for (ArrayRef<SVal>::iterator I = Values.begin(), 1265 E = Values.end(); I != E; ++I) { 1266 SVal V = *I; 1267 if (Optional<nonloc::LazyCompoundVal> LCS = 1268 V.getAs<nonloc::LazyCompoundVal>()) { 1269 1270 const SValListTy &Vals = getInterestingValues(*LCS); 1271 1272 for (SValListTy::const_iterator I = Vals.begin(), 1273 E = Vals.end(); I != E; ++I) { 1274 // Note: the last argument is false here because these are 1275 // non-top-level regions. 
1276 if (const MemRegion *R = (*I).getAsRegion()) 1277 W.AddToWorkList(R); 1278 } 1279 continue; 1280 } 1281 1282 if (const MemRegion *R = V.getAsRegion()) { 1283 if (TopLevelRegions) 1284 TopLevelRegions->push_back(R); 1285 W.AddToWorkList(R); 1286 continue; 1287 } 1288 } 1289 } 1290 1291 StoreRef 1292 RegionStoreManager::invalidateRegions(Store store, 1293 ArrayRef<SVal> Values, 1294 const Expr *Ex, unsigned Count, 1295 const LocationContext *LCtx, 1296 const CallEvent *Call, 1297 InvalidatedSymbols &IS, 1298 RegionAndSymbolInvalidationTraits &ITraits, 1299 InvalidatedRegions *TopLevelRegions, 1300 InvalidatedRegions *Invalidated) { 1301 GlobalsFilterKind GlobalsFilter; 1302 if (Call) { 1303 if (Call->isInSystemHeader()) 1304 GlobalsFilter = GFK_SystemOnly; 1305 else 1306 GlobalsFilter = GFK_All; 1307 } else { 1308 GlobalsFilter = GFK_None; 1309 } 1310 1311 RegionBindingsRef B = getRegionBindings(store); 1312 InvalidateRegionsWorker W(*this, StateMgr, B, Ex, Count, LCtx, IS, ITraits, 1313 Invalidated, GlobalsFilter); 1314 1315 // Scan the bindings and generate the clusters. 1316 W.GenerateClusters(); 1317 1318 // Add the regions to the worklist. 1319 populateWorkList(W, Values, TopLevelRegions); 1320 1321 W.RunWorkList(); 1322 1323 // Return the new bindings. 1324 B = W.getRegionBindings(); 1325 1326 // For calls, determine which global regions should be invalidated and 1327 // invalidate them. (Note that function-static and immutable globals are never 1328 // invalidated by this.) 1329 // TODO: This could possibly be more precise with modules. 1330 switch (GlobalsFilter) { 1331 case GFK_All: 1332 B = invalidateGlobalRegion(MemRegion::GlobalInternalSpaceRegionKind, 1333 Ex, Count, LCtx, B, Invalidated); 1334 // FALLTHROUGH 1335 case GFK_SystemOnly: 1336 B = invalidateGlobalRegion(MemRegion::GlobalSystemSpaceRegionKind, 1337 Ex, Count, LCtx, B, Invalidated); 1338 // FALLTHROUGH 1339 case GFK_None: 1340 break; 1341 } 1342 1343 return StoreRef(B.asStore(), *this); 1344 } 1345 1346 //===----------------------------------------------------------------------===// 1347 // Extents for regions. 1348 //===----------------------------------------------------------------------===// 1349 1350 DefinedOrUnknownSVal 1351 RegionStoreManager::getSizeInElements(ProgramStateRef state, 1352 const MemRegion *R, 1353 QualType EleTy) { 1354 SVal Size = cast<SubRegion>(R)->getExtent(svalBuilder); 1355 const llvm::APSInt *SizeInt = svalBuilder.getKnownValue(state, Size); 1356 if (!SizeInt) 1357 return UnknownVal(); 1358 1359 CharUnits RegionSize = CharUnits::fromQuantity(SizeInt->getSExtValue()); 1360 1361 if (Ctx.getAsVariableArrayType(EleTy)) { 1362 // FIXME: We need to track extra state to properly record the size 1363 // of VLAs. Returning UnknownVal here, however, is a stop-gap so that 1364 // we don't have a divide-by-zero below. 1365 return UnknownVal(); 1366 } 1367 1368 CharUnits EleSize = Ctx.getTypeSizeInChars(EleTy); 1369 1370 // If a variable is reinterpreted as a type that doesn't fit into a larger 1371 // type evenly, round it down. 1372 // This is a signed value, since it's used in arithmetic with signed indices. 1373 return svalBuilder.makeIntVal(RegionSize / EleSize, 1374 svalBuilder.getArrayIndexType()); 1375 } 1376 1377 //===----------------------------------------------------------------------===// 1378 // Location and region casting. 
1379 //===----------------------------------------------------------------------===// 1380 1381 /// ArrayToPointer - Emulates the "decay" of an array to a pointer 1382 /// type. 'Array' represents the lvalue of the array being decayed 1383 /// to a pointer, and the returned SVal represents the decayed 1384 /// version of that lvalue (i.e., a pointer to the first element of 1385 /// the array). This is called by ExprEngine when evaluating casts 1386 /// from arrays to pointers. 1387 SVal RegionStoreManager::ArrayToPointer(Loc Array, QualType T) { 1388 if (Array.getAs<loc::ConcreteInt>()) 1389 return Array; 1390 1391 if (!Array.getAs<loc::MemRegionVal>()) 1392 return UnknownVal(); 1393 1394 const SubRegion *R = 1395 cast<SubRegion>(Array.castAs<loc::MemRegionVal>().getRegion()); 1396 NonLoc ZeroIdx = svalBuilder.makeZeroArrayIndex(); 1397 return loc::MemRegionVal(MRMgr.getElementRegion(T, ZeroIdx, R, Ctx)); 1398 } 1399 1400 //===----------------------------------------------------------------------===// 1401 // Loading values from regions. 1402 //===----------------------------------------------------------------------===// 1403 1404 SVal RegionStoreManager::getBinding(RegionBindingsConstRef B, Loc L, QualType T) { 1405 assert(!L.getAs<UnknownVal>() && "location unknown"); 1406 assert(!L.getAs<UndefinedVal>() && "location undefined"); 1407 1408 // For access to concrete addresses, return UnknownVal. Checks 1409 // for null dereferences (and similar errors) are done by checkers, not 1410 // the Store. 1411 // FIXME: We can consider lazily symbolicating such memory, but we really 1412 // should defer this when we can reason easily about symbolicating arrays 1413 // of bytes. 1414 if (L.getAs<loc::ConcreteInt>()) { 1415 return UnknownVal(); 1416 } 1417 if (!L.getAs<loc::MemRegionVal>()) { 1418 return UnknownVal(); 1419 } 1420 1421 const MemRegion *MR = L.castAs<loc::MemRegionVal>().getRegion(); 1422 1423 if (isa<BlockDataRegion>(MR)) { 1424 return UnknownVal(); 1425 } 1426 1427 if (!isa<TypedValueRegion>(MR)) { 1428 if (T.isNull()) { 1429 if (const TypedRegion *TR = dyn_cast<TypedRegion>(MR)) 1430 T = TR->getLocationType()->getPointeeType(); 1431 else if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR)) 1432 T = SR->getSymbol()->getType()->getPointeeType(); 1433 } 1434 assert(!T.isNull() && "Unable to auto-detect binding type!"); 1435 assert(!T->isVoidType() && "Attempting to dereference a void pointer!"); 1436 MR = GetElementZeroRegion(cast<SubRegion>(MR), T); 1437 } else { 1438 T = cast<TypedValueRegion>(MR)->getValueType(); 1439 } 1440 1441 // FIXME: Perhaps this method should just take a 'const MemRegion*' argument 1442 // instead of 'Loc', and have the other Loc cases handled at a higher level. 1443 const TypedValueRegion *R = cast<TypedValueRegion>(MR); 1444 QualType RTy = R->getValueType(); 1445 1446 // FIXME: we do not yet model the parts of a complex type, so treat the 1447 // whole thing as "unknown". 1448 if (RTy->isAnyComplexType()) 1449 return UnknownVal(); 1450 1451 // FIXME: We should eventually handle funny addressing. e.g.: 1452 // 1453 // int x = ...; 1454 // int *p = &x; 1455 // char *q = (char*) p; 1456 // char c = *q; // returns the first byte of 'x'. 1457 // 1458 // Such funny addressing will occur due to layering of regions. 1459 if (RTy->isStructureOrClassType()) 1460 return getBindingForStruct(B, R); 1461 1462 // FIXME: Handle unions. 
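  // For now a whole-union read conservatively produces a lazy compound value
  // covering the region rather than a precise member value.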
1463 if (RTy->isUnionType()) 1464 return createLazyBinding(B, R); 1465 1466 if (RTy->isArrayType()) { 1467 if (RTy->isConstantArrayType()) 1468 return getBindingForArray(B, R); 1469 else 1470 return UnknownVal(); 1471 } 1472 1473 // FIXME: handle Vector types. 1474 if (RTy->isVectorType()) 1475 return UnknownVal(); 1476 1477 if (const FieldRegion* FR = dyn_cast<FieldRegion>(R)) 1478 return CastRetrievedVal(getBindingForField(B, FR), FR, T); 1479 1480 if (const ElementRegion* ER = dyn_cast<ElementRegion>(R)) { 1481 // FIXME: Here we actually perform an implicit conversion from the loaded 1482 // value to the element type. Eventually we want to compose these values 1483 // more intelligently. For example, an 'element' can encompass multiple 1484 // bound regions (e.g., several bound bytes), or could be a subset of 1485 // a larger value. 1486 return CastRetrievedVal(getBindingForElement(B, ER), ER, T); 1487 } 1488 1489 if (const ObjCIvarRegion *IVR = dyn_cast<ObjCIvarRegion>(R)) { 1490 // FIXME: Here we actually perform an implicit conversion from the loaded 1491 // value to the ivar type. What we should model is stores to ivars 1492 // that blow past the extent of the ivar. If the address of the ivar is 1493 // reinterpreted, it is possible we stored a different value that could 1494 // fit within the ivar. Either we need to cast these when storing them 1495 // or reinterpret them lazily (as we do here). 1496 return CastRetrievedVal(getBindingForObjCIvar(B, IVR), IVR, T); 1497 } 1498 1499 if (const VarRegion *VR = dyn_cast<VarRegion>(R)) { 1500 // FIXME: Here we actually perform an implicit conversion from the loaded 1501 // value to the variable type. What we should model is stores to variables 1502 // that blow past the extent of the variable. If the address of the 1503 // variable is reinterpreted, it is possible we stored a different value 1504 // that could fit within the variable. Either we need to cast these when 1505 // storing them or reinterpret them lazily (as we do here). 1506 return CastRetrievedVal(getBindingForVar(B, VR), VR, T); 1507 } 1508 1509 const SVal *V = B.lookup(R, BindingKey::Direct); 1510 1511 // Check if the region has a binding. 1512 if (V) 1513 return *V; 1514 1515 // The location does not have a bound value. This means that it has 1516 // the value it had upon its creation and/or entry to the analyzed 1517 // function/method. These are either symbolic values or 'undefined'. 1518 if (R->hasStackNonParametersStorage()) { 1519 // All stack variables are considered to have undefined values 1520 // upon creation. All heap allocated blocks are considered to 1521 // have undefined values as well unless they are explicitly bound 1522 // to specific values. 1523 return UndefinedVal(); 1524 } 1525 1526 // All other values are symbolic. 1527 return svalBuilder.getRegionValueSymbolVal(R); 1528 } 1529 1530 static QualType getUnderlyingType(const SubRegion *R) { 1531 QualType RegionTy; 1532 if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R)) 1533 RegionTy = TVR->getValueType(); 1534 1535 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) 1536 RegionTy = SR->getSymbol()->getType(); 1537 1538 return RegionTy; 1539 } 1540 1541 /// Checks to see if store \p B has a lazy binding for region \p R. 1542 /// 1543 /// If \p AllowSubregionBindings is \c false, a lazy binding will be rejected 1544 /// if there are additional bindings within \p R.
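/// (Such inner bindings may override parts of \p R, so the whole-region lazy
/// value would no longer describe its current contents.)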
1545 ///
1546 /// Note that unlike RegionStoreManager::findLazyBinding, this will not search
1547 /// for lazy bindings for super-regions of \p R.
1548 static Optional<nonloc::LazyCompoundVal>
1549 getExistingLazyBinding(SValBuilder &SVB, RegionBindingsConstRef B,
1550                        const SubRegion *R, bool AllowSubregionBindings) {
1551   Optional<SVal> V = B.getDefaultBinding(R);
1552   if (!V)
1553     return None;
1554
1555   Optional<nonloc::LazyCompoundVal> LCV = V->getAs<nonloc::LazyCompoundVal>();
1556   if (!LCV)
1557     return None;
1558
1559   // If the LCV is for a subregion, the types might not match, and we shouldn't
1560   // reuse the binding.
1561   QualType RegionTy = getUnderlyingType(R);
1562   if (!RegionTy.isNull() &&
1563       !RegionTy->isVoidPointerType()) {
1564     QualType SourceRegionTy = LCV->getRegion()->getValueType();
1565     if (!SVB.getContext().hasSameUnqualifiedType(RegionTy, SourceRegionTy))
1566       return None;
1567   }
1568
1569   if (!AllowSubregionBindings) {
1570     // If there are any other bindings within this region, we shouldn't reuse
1571     // the top-level binding.
1572     SmallVector<BindingPair, 16> Bindings;
1573     collectSubRegionBindings(Bindings, SVB, *B.lookup(R->getBaseRegion()), R,
1574                              /*IncludeAllDefaultBindings=*/true);
1575     if (Bindings.size() > 1)
1576       return None;
1577   }
1578
1579   return *LCV;
1580 }
1581
1582
1583 std::pair<Store, const SubRegion *>
1584 RegionStoreManager::findLazyBinding(RegionBindingsConstRef B,
1585                                     const SubRegion *R,
1586                                     const SubRegion *originalRegion) {
1587   if (originalRegion != R) {
1588     if (Optional<nonloc::LazyCompoundVal> V =
1589           getExistingLazyBinding(svalBuilder, B, R,
1590                                  /*AllowSubregionBindings=*/true))
1590       return std::make_pair(V->getStore(), V->getRegion());
1591   }
1592
1593   typedef std::pair<Store, const SubRegion *> StoreRegionPair;
1594   StoreRegionPair Result = StoreRegionPair();
1595
1596   if (const ElementRegion *ER = dyn_cast<ElementRegion>(R)) {
1597     Result = findLazyBinding(B, cast<SubRegion>(ER->getSuperRegion()),
1598                              originalRegion);
1599
1600     if (Result.second)
1601       Result.second = MRMgr.getElementRegionWithSuper(ER, Result.second);
1602
1603   } else if (const FieldRegion *FR = dyn_cast<FieldRegion>(R)) {
1604     Result = findLazyBinding(B, cast<SubRegion>(FR->getSuperRegion()),
1605                              originalRegion);
1606
1607     if (Result.second)
1608       Result.second = MRMgr.getFieldRegionWithSuper(FR, Result.second);
1609
1610   } else if (const CXXBaseObjectRegion *BaseReg =
1611                dyn_cast<CXXBaseObjectRegion>(R)) {
1612     // C++ base object region is another kind of region that we should blast
1613     // through to look for a lazy compound value. It is like a field region.
1614     Result = findLazyBinding(B, cast<SubRegion>(BaseReg->getSuperRegion()),
1615                              originalRegion);
1616
1617     if (Result.second)
1618       Result.second = MRMgr.getCXXBaseObjectRegionWithSuper(BaseReg,
1619                                                             Result.second);
1620   }
1621
1622   return Result;
1623 }
1624
1625 SVal RegionStoreManager::getBindingForElement(RegionBindingsConstRef B,
1626                                               const ElementRegion* R) {
1627   // We do not currently model bindings of the CompoundLiteralRegion.
1628   if (isa<CompoundLiteralRegion>(R->getBaseRegion()))
1629     return UnknownVal();
1630
1631   // Check if the region has a binding.
1632   if (const Optional<SVal> &V = B.getDirectBinding(R))
1633     return *V;
1634
1635   const MemRegion* superR = R->getSuperRegion();
1636
1637   // Check if the region is an element region of a string literal.
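  // For illustration, a load such as (purely illustrative):
  //
  //   char c = "hello"[1];   // yields 'e'
  //
  // takes this path: the super-region is the StringRegion for "hello" and the
  // index is the concrete offset 1, so the value is read straight out of the
  // literal below.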
1638 if (const StringRegion *StrR = dyn_cast<StringRegion>(superR)) { 1639 // FIXME: Handle loads from strings where the literal is treated as 1640 // an integer, e.g., *((unsigned int*)"hello") 1641 QualType T = Ctx.getAsArrayType(StrR->getValueType())->getElementType(); 1642 if (!Ctx.hasSameUnqualifiedType(T, R->getElementType())) 1643 return UnknownVal(); 1644 1645 const StringLiteral *Str = StrR->getStringLiteral(); 1646 SVal Idx = R->getIndex(); 1647 if (Optional<nonloc::ConcreteInt> CI = Idx.getAs<nonloc::ConcreteInt>()) { 1648 int64_t i = CI->getValue().getSExtValue(); 1649 // Abort on string underrun. This can be possible by arbitrary 1650 // clients of getBindingForElement(). 1651 if (i < 0) 1652 return UndefinedVal(); 1653 int64_t length = Str->getLength(); 1654 // Technically, only i == length is guaranteed to be null. 1655 // However, such overflows should be caught before reaching this point; 1656 // the only time such an access would be made is if a string literal was 1657 // used to initialize a larger array. 1658 char c = (i >= length) ? '\0' : Str->getCodeUnit(i); 1659 return svalBuilder.makeIntVal(c, T); 1660 } 1661 } else if (const VarRegion *VR = dyn_cast<VarRegion>(superR)) { 1662 // Check if the containing array is const and has an initialized value. 1663 const VarDecl *VD = VR->getDecl(); 1664 // Either the array or the array element has to be const. 1665 if (VD->getType().isConstQualified() || R->getElementType().isConstQualified()) { 1666 if (const Expr *Init = VD->getInit()) { 1667 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1668 // The array index has to be known. 1669 if (auto CI = R->getIndex().getAs<nonloc::ConcreteInt>()) { 1670 int64_t i = CI->getValue().getSExtValue(); 1671 // If it is known that the index is out of bounds, we can return 1672 // an undefined value. 1673 if (i < 0) 1674 return UndefinedVal(); 1675 1676 if (auto CAT = Ctx.getAsConstantArrayType(VD->getType())) 1677 if (CAT->getSize().sle(i)) 1678 return UndefinedVal(); 1679 1680 // If there is a list, but no init, it must be zero. 1681 if (i >= InitList->getNumInits()) 1682 return svalBuilder.makeZeroVal(R->getElementType()); 1683 1684 if (const Expr *ElemInit = InitList->getInit(i)) 1685 if (Optional<SVal> V = svalBuilder.getConstantVal(ElemInit)) 1686 return *V; 1687 } 1688 } 1689 } 1690 } 1691 } 1692 1693 // Check for loads from a code text region. For such loads, just give up. 1694 if (isa<CodeTextRegion>(superR)) 1695 return UnknownVal(); 1696 1697 // Handle the case where we are indexing into a larger scalar object. 1698 // For example, this handles: 1699 // int x = ... 1700 // char *y = &x; 1701 // return *y; 1702 // FIXME: This is a hack, and doesn't do anything really intelligent yet. 1703 const RegionRawOffset &O = R->getAsArrayOffset(); 1704 1705 // If we cannot reason about the offset, return an unknown value. 1706 if (!O.getRegion()) 1707 return UnknownVal(); 1708 1709 if (const TypedValueRegion *baseR = 1710 dyn_cast_or_null<TypedValueRegion>(O.getRegion())) { 1711 QualType baseT = baseR->getValueType(); 1712 if (baseT->isScalarType()) { 1713 QualType elemT = R->getElementType(); 1714 if (elemT->isScalarType()) { 1715 if (Ctx.getTypeSizeInChars(baseT) >= Ctx.getTypeSizeInChars(elemT)) { 1716 if (const Optional<SVal> &V = B.getDirectBinding(superR)) { 1717 if (SymbolRef parentSym = V->getAsSymbol()) 1718 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1719 1720 if (V->isUnknownOrUndef()) 1721 return *V; 1722 // Other cases: give up. 
We are indexing into a larger object 1723 // that has some value, but we don't know how to handle that yet. 1724 return UnknownVal(); 1725 } 1726 } 1727 } 1728 } 1729 } 1730 return getBindingForFieldOrElementCommon(B, R, R->getElementType()); 1731 } 1732 1733 SVal RegionStoreManager::getBindingForField(RegionBindingsConstRef B, 1734 const FieldRegion* R) { 1735 1736 // Check if the region has a binding. 1737 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1738 return *V; 1739 1740 // Is the field declared constant and has an in-class initializer? 1741 const FieldDecl *FD = R->getDecl(); 1742 QualType Ty = FD->getType(); 1743 if (Ty.isConstQualified()) 1744 if (const Expr *Init = FD->getInClassInitializer()) 1745 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1746 return *V; 1747 1748 // If the containing record was initialized, try to get its constant value. 1749 const MemRegion* superR = R->getSuperRegion(); 1750 if (const auto *VR = dyn_cast<VarRegion>(superR)) { 1751 const VarDecl *VD = VR->getDecl(); 1752 QualType RecordVarTy = VD->getType(); 1753 unsigned Index = FD->getFieldIndex(); 1754 // Either the record variable or the field has to be const qualified. 1755 if (RecordVarTy.isConstQualified() || Ty.isConstQualified()) 1756 if (const Expr *Init = VD->getInit()) 1757 if (const auto *InitList = dyn_cast<InitListExpr>(Init)) { 1758 if (Index < InitList->getNumInits()) { 1759 if (const Expr *FieldInit = InitList->getInit(Index)) 1760 if (Optional<SVal> V = svalBuilder.getConstantVal(FieldInit)) 1761 return *V; 1762 } else { 1763 return svalBuilder.makeZeroVal(Ty); 1764 } 1765 } 1766 } 1767 1768 return getBindingForFieldOrElementCommon(B, R, Ty); 1769 } 1770 1771 Optional<SVal> 1772 RegionStoreManager::getBindingForDerivedDefaultValue(RegionBindingsConstRef B, 1773 const MemRegion *superR, 1774 const TypedValueRegion *R, 1775 QualType Ty) { 1776 1777 if (const Optional<SVal> &D = B.getDefaultBinding(superR)) { 1778 const SVal &val = D.getValue(); 1779 if (SymbolRef parentSym = val.getAsSymbol()) 1780 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1781 1782 if (val.isZeroConstant()) 1783 return svalBuilder.makeZeroVal(Ty); 1784 1785 if (val.isUnknownOrUndef()) 1786 return val; 1787 1788 // Lazy bindings are usually handled through getExistingLazyBinding(). 1789 // We should unify these two code paths at some point. 1790 if (val.getAs<nonloc::LazyCompoundVal>() || 1791 val.getAs<nonloc::CompoundVal>()) 1792 return val; 1793 1794 llvm_unreachable("Unknown default value"); 1795 } 1796 1797 return None; 1798 } 1799 1800 SVal RegionStoreManager::getLazyBinding(const SubRegion *LazyBindingRegion, 1801 RegionBindingsRef LazyBinding) { 1802 SVal Result; 1803 if (const ElementRegion *ER = dyn_cast<ElementRegion>(LazyBindingRegion)) 1804 Result = getBindingForElement(LazyBinding, ER); 1805 else 1806 Result = getBindingForField(LazyBinding, 1807 cast<FieldRegion>(LazyBindingRegion)); 1808 1809 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1810 // default value for /part/ of an aggregate from a default value for the 1811 // /entire/ aggregate. The most common case of this is when struct Outer 1812 // has as its first member a struct Inner, which is copied in from a stack 1813 // variable. In this case, even if the Outer's default value is symbolic, 0, 1814 // or unknown, it gets overridden by the Inner's default value of undefined. 
1815 // 1816 // This is a general problem -- if the Inner is zero-initialized, the Outer 1817 // will now look zero-initialized. The proper way to solve this is with a 1818 // new version of RegionStore that tracks the extent of a binding as well 1819 // as the offset. 1820 // 1821 // This hack only takes care of the undefined case because that can very 1822 // quickly result in a warning. 1823 if (Result.isUndef()) 1824 Result = UnknownVal(); 1825 1826 return Result; 1827 } 1828 1829 SVal 1830 RegionStoreManager::getBindingForFieldOrElementCommon(RegionBindingsConstRef B, 1831 const TypedValueRegion *R, 1832 QualType Ty) { 1833 1834 // At this point we have already checked in either getBindingForElement or 1835 // getBindingForField if 'R' has a direct binding. 1836 1837 // Lazy binding? 1838 Store lazyBindingStore = nullptr; 1839 const SubRegion *lazyBindingRegion = nullptr; 1840 std::tie(lazyBindingStore, lazyBindingRegion) = findLazyBinding(B, R, R); 1841 if (lazyBindingRegion) 1842 return getLazyBinding(lazyBindingRegion, 1843 getRegionBindings(lazyBindingStore)); 1844 1845 // Record whether or not we see a symbolic index. That can completely 1846 // be out of scope of our lookup. 1847 bool hasSymbolicIndex = false; 1848 1849 // FIXME: This is a hack to deal with RegionStore's inability to distinguish a 1850 // default value for /part/ of an aggregate from a default value for the 1851 // /entire/ aggregate. The most common case of this is when struct Outer 1852 // has as its first member a struct Inner, which is copied in from a stack 1853 // variable. In this case, even if the Outer's default value is symbolic, 0, 1854 // or unknown, it gets overridden by the Inner's default value of undefined. 1855 // 1856 // This is a general problem -- if the Inner is zero-initialized, the Outer 1857 // will now look zero-initialized. The proper way to solve this is with a 1858 // new version of RegionStore that tracks the extent of a binding as well 1859 // as the offset. 1860 // 1861 // This hack only takes care of the undefined case because that can very 1862 // quickly result in a warning. 1863 bool hasPartialLazyBinding = false; 1864 1865 const SubRegion *SR = R; 1866 while (SR) { 1867 const MemRegion *Base = SR->getSuperRegion(); 1868 if (Optional<SVal> D = getBindingForDerivedDefaultValue(B, Base, R, Ty)) { 1869 if (D->getAs<nonloc::LazyCompoundVal>()) { 1870 hasPartialLazyBinding = true; 1871 break; 1872 } 1873 1874 return *D; 1875 } 1876 1877 if (const ElementRegion *ER = dyn_cast<ElementRegion>(Base)) { 1878 NonLoc index = ER->getIndex(); 1879 if (!index.isConstant()) 1880 hasSymbolicIndex = true; 1881 } 1882 1883 // If our super region is a field or element itself, walk up the region 1884 // hierarchy to see if there is a default value installed in an ancestor. 1885 SR = dyn_cast<SubRegion>(Base); 1886 } 1887 1888 if (R->hasStackNonParametersStorage()) { 1889 if (isa<ElementRegion>(R)) { 1890 // Currently we don't reason specially about Clang-style vectors. Check 1891 // if superR is a vector and if so return Unknown. 1892 if (const TypedValueRegion *typedSuperR = 1893 dyn_cast<TypedValueRegion>(R->getSuperRegion())) { 1894 if (typedSuperR->getValueType()->isVectorType()) 1895 return UnknownVal(); 1896 } 1897 } 1898 1899 // FIXME: We also need to take ElementRegions with symbolic indexes into 1900 // account. This case handles both directly accessing an ElementRegion 1901 // with a symbolic offset, but also fields within an element with 1902 // a symbolic offset. 
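    // For illustration (purely illustrative code, with 'i' symbolic):
    //
    //   int a[8];
    //   int x = a[i];          // direct access with a symbolic index
    //   int y = s.buf[i].fld;  // field within an element at a symbolic offset
    //
    // Both cases conservatively produce Unknown below rather than Undefined.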
1903 if (hasSymbolicIndex) 1904 return UnknownVal(); 1905 1906 if (!hasPartialLazyBinding) 1907 return UndefinedVal(); 1908 } 1909 1910 // All other values are symbolic. 1911 return svalBuilder.getRegionValueSymbolVal(R); 1912 } 1913 1914 SVal RegionStoreManager::getBindingForObjCIvar(RegionBindingsConstRef B, 1915 const ObjCIvarRegion* R) { 1916 // Check if the region has a binding. 1917 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1918 return *V; 1919 1920 const MemRegion *superR = R->getSuperRegion(); 1921 1922 // Check if the super region has a default binding. 1923 if (const Optional<SVal> &V = B.getDefaultBinding(superR)) { 1924 if (SymbolRef parentSym = V->getAsSymbol()) 1925 return svalBuilder.getDerivedRegionValueSymbolVal(parentSym, R); 1926 1927 // Other cases: give up. 1928 return UnknownVal(); 1929 } 1930 1931 return getBindingForLazySymbol(R); 1932 } 1933 1934 SVal RegionStoreManager::getBindingForVar(RegionBindingsConstRef B, 1935 const VarRegion *R) { 1936 1937 // Check if the region has a binding. 1938 if (const Optional<SVal> &V = B.getDirectBinding(R)) 1939 return *V; 1940 1941 // Lazily derive a value for the VarRegion. 1942 const VarDecl *VD = R->getDecl(); 1943 const MemSpaceRegion *MS = R->getMemorySpace(); 1944 1945 // Arguments are always symbolic. 1946 if (isa<StackArgumentsSpaceRegion>(MS)) 1947 return svalBuilder.getRegionValueSymbolVal(R); 1948 1949 // Is 'VD' declared constant? If so, retrieve the constant value. 1950 if (VD->getType().isConstQualified()) { 1951 if (const Expr *Init = VD->getInit()) { 1952 if (Optional<SVal> V = svalBuilder.getConstantVal(Init)) 1953 return *V; 1954 1955 // If the variable is const qualified and has an initializer but 1956 // we couldn't evaluate initializer to a value, treat the value as 1957 // unknown. 1958 return UnknownVal(); 1959 } 1960 } 1961 1962 // This must come after the check for constants because closure-captured 1963 // constant variables may appear in UnknownSpaceRegion. 1964 if (isa<UnknownSpaceRegion>(MS)) 1965 return svalBuilder.getRegionValueSymbolVal(R); 1966 1967 if (isa<GlobalsSpaceRegion>(MS)) { 1968 QualType T = VD->getType(); 1969 1970 // Function-scoped static variables are default-initialized to 0; if they 1971 // have an initializer, it would have been processed by now. 1972 // FIXME: This is only true when we're starting analysis from main(). 1973 // We're losing a lot of coverage here. 1974 if (isa<StaticGlobalSpaceRegion>(MS)) 1975 return svalBuilder.makeZeroVal(T); 1976 1977 if (Optional<SVal> V = getBindingForDerivedDefaultValue(B, MS, R, T)) { 1978 assert(!V->getAs<nonloc::LazyCompoundVal>()); 1979 return V.getValue(); 1980 } 1981 1982 return svalBuilder.getRegionValueSymbolVal(R); 1983 } 1984 1985 return UndefinedVal(); 1986 } 1987 1988 SVal RegionStoreManager::getBindingForLazySymbol(const TypedValueRegion *R) { 1989 // All other values are symbolic. 1990 return svalBuilder.getRegionValueSymbolVal(R); 1991 } 1992 1993 const RegionStoreManager::SValListTy & 1994 RegionStoreManager::getInterestingValues(nonloc::LazyCompoundVal LCV) { 1995 // First, check the cache. 1996 LazyBindingsMapTy::iterator I = LazyBindingsMap.find(LCV.getCVData()); 1997 if (I != LazyBindingsMap.end()) 1998 return I->second; 1999 2000 // If we don't have a list of values cached, start constructing it. 
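  // Roughly, the "interesting" values are the ones later consumers (such as
  // the dead-bindings worker below) still need to look at. For a copied
  // struct such as (purely illustrative):
  //
  //   struct S { int *p; int n; } s = { q, 0 };
  //
  // the pointer value bound to 's.p' is collected, while constants, unknowns,
  // and undefined values are skipped, and nested lazy compound values are
  // expanded recursively.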
2001 SValListTy List; 2002 2003 const SubRegion *LazyR = LCV.getRegion(); 2004 RegionBindingsRef B = getRegionBindings(LCV.getStore()); 2005 2006 // If this region had /no/ bindings at the time, there are no interesting 2007 // values to return. 2008 const ClusterBindings *Cluster = B.lookup(LazyR->getBaseRegion()); 2009 if (!Cluster) 2010 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2011 2012 SmallVector<BindingPair, 32> Bindings; 2013 collectSubRegionBindings(Bindings, svalBuilder, *Cluster, LazyR, 2014 /*IncludeAllDefaultBindings=*/true); 2015 for (SmallVectorImpl<BindingPair>::const_iterator I = Bindings.begin(), 2016 E = Bindings.end(); 2017 I != E; ++I) { 2018 SVal V = I->second; 2019 if (V.isUnknownOrUndef() || V.isConstant()) 2020 continue; 2021 2022 if (Optional<nonloc::LazyCompoundVal> InnerLCV = 2023 V.getAs<nonloc::LazyCompoundVal>()) { 2024 const SValListTy &InnerList = getInterestingValues(*InnerLCV); 2025 List.insert(List.end(), InnerList.begin(), InnerList.end()); 2026 continue; 2027 } 2028 2029 List.push_back(V); 2030 } 2031 2032 return (LazyBindingsMap[LCV.getCVData()] = std::move(List)); 2033 } 2034 2035 NonLoc RegionStoreManager::createLazyBinding(RegionBindingsConstRef B, 2036 const TypedValueRegion *R) { 2037 if (Optional<nonloc::LazyCompoundVal> V = 2038 getExistingLazyBinding(svalBuilder, B, R, false)) 2039 return *V; 2040 2041 return svalBuilder.makeLazyCompoundVal(StoreRef(B.asStore(), *this), R); 2042 } 2043 2044 static bool isRecordEmpty(const RecordDecl *RD) { 2045 if (!RD->field_empty()) 2046 return false; 2047 if (const CXXRecordDecl *CRD = dyn_cast<CXXRecordDecl>(RD)) 2048 return CRD->getNumBases() == 0; 2049 return true; 2050 } 2051 2052 SVal RegionStoreManager::getBindingForStruct(RegionBindingsConstRef B, 2053 const TypedValueRegion *R) { 2054 const RecordDecl *RD = R->getValueType()->castAs<RecordType>()->getDecl(); 2055 if (!RD->getDefinition() || isRecordEmpty(RD)) 2056 return UnknownVal(); 2057 2058 return createLazyBinding(B, R); 2059 } 2060 2061 SVal RegionStoreManager::getBindingForArray(RegionBindingsConstRef B, 2062 const TypedValueRegion *R) { 2063 assert(Ctx.getAsConstantArrayType(R->getValueType()) && 2064 "Only constant array types can have compound bindings."); 2065 2066 return createLazyBinding(B, R); 2067 } 2068 2069 bool RegionStoreManager::includedInBindings(Store store, 2070 const MemRegion *region) const { 2071 RegionBindingsRef B = getRegionBindings(store); 2072 region = region->getBaseRegion(); 2073 2074 // Quick path: if the base is the head of a cluster, the region is live. 2075 if (B.lookup(region)) 2076 return true; 2077 2078 // Slow path: if the region is the VALUE of any binding, it is live. 2079 for (RegionBindingsRef::iterator RI = B.begin(), RE = B.end(); RI != RE; ++RI) { 2080 const ClusterBindings &Cluster = RI.getData(); 2081 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2082 CI != CE; ++CI) { 2083 const SVal &D = CI.getData(); 2084 if (const MemRegion *R = D.getAsRegion()) 2085 if (R->getBaseRegion() == region) 2086 return true; 2087 } 2088 } 2089 2090 return false; 2091 } 2092 2093 //===----------------------------------------------------------------------===// 2094 // Binding values to regions. 
2095 //===----------------------------------------------------------------------===//
2096
2097 StoreRef RegionStoreManager::killBinding(Store ST, Loc L) {
2098   if (Optional<loc::MemRegionVal> LV = L.getAs<loc::MemRegionVal>())
2099     if (const MemRegion* R = LV->getRegion())
2100       return StoreRef(getRegionBindings(ST).removeBinding(R)
2101                                            .asImmutableMap()
2102                                            .getRootWithoutRetain(),
2103                       *this);
2104
2105   return StoreRef(ST, *this);
2106 }
2107
2108 RegionBindingsRef
2109 RegionStoreManager::bind(RegionBindingsConstRef B, Loc L, SVal V) {
2110   if (L.getAs<loc::ConcreteInt>())
2111     return B;
2112
2113   // If we get here, the location should be a region.
2114   const MemRegion *R = L.castAs<loc::MemRegionVal>().getRegion();
2115
2116   // Check if the region is a struct region.
2117   if (const TypedValueRegion* TR = dyn_cast<TypedValueRegion>(R)) {
2118     QualType Ty = TR->getValueType();
2119     if (Ty->isArrayType())
2120       return bindArray(B, TR, V);
2121     if (Ty->isStructureOrClassType())
2122       return bindStruct(B, TR, V);
2123     if (Ty->isVectorType())
2124       return bindVector(B, TR, V);
2125     if (Ty->isUnionType())
2126       return bindAggregate(B, TR, V);
2127   }
2128
2129   if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) {
2130     // Binding directly to a symbolic region should be treated as binding
2131     // to element 0.
2132     QualType T = SR->getSymbol()->getType();
2133     if (T->isAnyPointerType() || T->isReferenceType())
2134       T = T->getPointeeType();
2135
2136     R = GetElementZeroRegion(SR, T);
2137   }
2138
2139   assert((!isa<CXXThisRegion>(R) || !B.lookup(R)) &&
2140          "'this' pointer is not an l-value and is not assignable");
2141
2142   // Clear out bindings that may overlap with this binding.
2143   RegionBindingsRef NewB = removeSubRegionBindings(B, cast<SubRegion>(R));
2144   return NewB.addBinding(BindingKey::Make(R, BindingKey::Direct), V);
2145 }
2146
2147 RegionBindingsRef
2148 RegionStoreManager::setImplicitDefaultValue(RegionBindingsConstRef B,
2149                                             const MemRegion *R,
2150                                             QualType T) {
2151   SVal V;
2152
2153   if (Loc::isLocType(T))
2154     V = svalBuilder.makeNull();
2155   else if (T->isIntegralOrEnumerationType())
2156     V = svalBuilder.makeZeroVal(T);
2157   else if (T->isStructureOrClassType() || T->isArrayType()) {
2158     // Set the default value to a zero constant when it is a structure
2159     // or array. The type doesn't really matter.
2160     V = svalBuilder.makeZeroVal(Ctx.IntTy);
2161   }
2162   else {
2163     // We can't represent values of this type, but we still need to set a value
2164     // to record that the region has been initialized.
2165     // If this assertion ever fires, a new case should be added above -- we
2166     // should know how to default-initialize any value we can symbolicate.
2167     assert(!SymbolManager::canSymbolicate(T) && "This type is representable");
2168     V = UnknownVal();
2169   }
2170
2171   return B.addBinding(R, BindingKey::Default, V);
2172 }
2173
2174 RegionBindingsRef
2175 RegionStoreManager::bindArray(RegionBindingsConstRef B,
2176                               const TypedValueRegion* R,
2177                               SVal Init) {
2178
2179   const ArrayType *AT = cast<ArrayType>(Ctx.getCanonicalType(R->getValueType()));
2180   QualType ElementTy = AT->getElementType();
2181   Optional<uint64_t> Size;
2182
2183   if (const ConstantArrayType* CAT = dyn_cast<ConstantArrayType>(AT))
2184     Size = CAT->getSize().getZExtValue();
2185
2186   // Check if the init expr is a literal. If so, bind the rvalue instead.
2187   // FIXME: It's not the responsibility of the Store to transform this lvalue
2188   // to an rvalue. ExprEngine or maybe even CFG should do this before binding.
2189   if (Optional<loc::MemRegionVal> MRV = Init.getAs<loc::MemRegionVal>()) {
2190     SVal V = getBinding(B.asStore(), *MRV, R->getValueType());
2191     return bindAggregate(B, R, V);
2192   }
2193
2194   // Handle lazy compound values.
2195   if (Init.getAs<nonloc::LazyCompoundVal>())
2196     return bindAggregate(B, R, Init);
2197
2198   if (Init.isUnknown())
2199     return bindAggregate(B, R, UnknownVal());
2200
2201   // Remaining case: explicit compound values.
2202   const nonloc::CompoundVal& CV = Init.castAs<nonloc::CompoundVal>();
2203   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2204   uint64_t i = 0;
2205
2206   RegionBindingsRef NewB(B);
2207
2208   for (; Size.hasValue() ? i < Size.getValue() : true ; ++i, ++VI) {
2209     // The init list might be shorter than the array length.
2210     if (VI == VE)
2211       break;
2212
2213     const NonLoc &Idx = svalBuilder.makeArrayIndex(i);
2214     const ElementRegion *ER = MRMgr.getElementRegion(ElementTy, Idx, R, Ctx);
2215
2216     if (ElementTy->isStructureOrClassType())
2217       NewB = bindStruct(NewB, ER, *VI);
2218     else if (ElementTy->isArrayType())
2219       NewB = bindArray(NewB, ER, *VI);
2220     else
2221       NewB = bind(NewB, loc::MemRegionVal(ER), *VI);
2222   }
2223
2224   // If the init list is shorter than the array length (or the array has
2225   // variable length), set the array default value. Values that are already set
2226   // are not overwritten.
2227   if (!Size.hasValue() || i < Size.getValue())
2228     NewB = setImplicitDefaultValue(NewB, R, ElementTy);
2229
2230   return NewB;
2231 }
2232
2233 RegionBindingsRef RegionStoreManager::bindVector(RegionBindingsConstRef B,
2234                                                  const TypedValueRegion* R,
2235                                                  SVal V) {
2236   QualType T = R->getValueType();
2237   assert(T->isVectorType());
2238   const VectorType *VT = T->getAs<VectorType>(); // Use getAs for typedefs.
2239
2240   // Handle lazy compound values and symbolic values.
2241   if (V.getAs<nonloc::LazyCompoundVal>() || V.getAs<nonloc::SymbolVal>())
2242     return bindAggregate(B, R, V);
2243
2244   // We may get a non-CompoundVal accidentally, due to imprecise cast logic or
2245   // because we are binding a symbolic struct value. Kill the field values, and
2246   // if the value is symbolic, bind it as a "default" binding.
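  // For illustration, an initialization such as (purely illustrative):
  //
  //   typedef int v4int __attribute__((vector_size(16)));
  //   v4int v = {1, 2, 3, 4};
  //
  // arrives here as a CompoundVal and each element of 'v' is bound
  // individually by the loop below; anything else falls back to a default
  // binding covering the whole vector.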
2247 if (!V.getAs<nonloc::CompoundVal>()) { 2248 return bindAggregate(B, R, UnknownVal()); 2249 } 2250 2251 QualType ElemType = VT->getElementType(); 2252 nonloc::CompoundVal CV = V.castAs<nonloc::CompoundVal>(); 2253 nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end(); 2254 unsigned index = 0, numElements = VT->getNumElements(); 2255 RegionBindingsRef NewB(B); 2256 2257 for ( ; index != numElements ; ++index) { 2258 if (VI == VE) 2259 break; 2260 2261 NonLoc Idx = svalBuilder.makeArrayIndex(index); 2262 const ElementRegion *ER = MRMgr.getElementRegion(ElemType, Idx, R, Ctx); 2263 2264 if (ElemType->isArrayType()) 2265 NewB = bindArray(NewB, ER, *VI); 2266 else if (ElemType->isStructureOrClassType()) 2267 NewB = bindStruct(NewB, ER, *VI); 2268 else 2269 NewB = bind(NewB, loc::MemRegionVal(ER), *VI); 2270 } 2271 return NewB; 2272 } 2273 2274 Optional<RegionBindingsRef> 2275 RegionStoreManager::tryBindSmallStruct(RegionBindingsConstRef B, 2276 const TypedValueRegion *R, 2277 const RecordDecl *RD, 2278 nonloc::LazyCompoundVal LCV) { 2279 FieldVector Fields; 2280 2281 if (const CXXRecordDecl *Class = dyn_cast<CXXRecordDecl>(RD)) 2282 if (Class->getNumBases() != 0 || Class->getNumVBases() != 0) 2283 return None; 2284 2285 for (const auto *FD : RD->fields()) { 2286 if (FD->isUnnamedBitfield()) 2287 continue; 2288 2289 // If there are too many fields, or if any of the fields are aggregates, 2290 // just use the LCV as a default binding. 2291 if (Fields.size() == SmallStructLimit) 2292 return None; 2293 2294 QualType Ty = FD->getType(); 2295 if (!(Ty->isScalarType() || Ty->isReferenceType())) 2296 return None; 2297 2298 Fields.push_back(FD); 2299 } 2300 2301 RegionBindingsRef NewB = B; 2302 2303 for (FieldVector::iterator I = Fields.begin(), E = Fields.end(); I != E; ++I){ 2304 const FieldRegion *SourceFR = MRMgr.getFieldRegion(*I, LCV.getRegion()); 2305 SVal V = getBindingForField(getRegionBindings(LCV.getStore()), SourceFR); 2306 2307 const FieldRegion *DestFR = MRMgr.getFieldRegion(*I, R); 2308 NewB = bind(NewB, loc::MemRegionVal(DestFR), V); 2309 } 2310 2311 return NewB; 2312 } 2313 2314 RegionBindingsRef RegionStoreManager::bindStruct(RegionBindingsConstRef B, 2315 const TypedValueRegion* R, 2316 SVal V) { 2317 if (!Features.supportsFields()) 2318 return B; 2319 2320 QualType T = R->getValueType(); 2321 assert(T->isStructureOrClassType()); 2322 2323 const RecordType* RT = T->getAs<RecordType>(); 2324 const RecordDecl *RD = RT->getDecl(); 2325 2326 if (!RD->isCompleteDefinition()) 2327 return B; 2328 2329 // Handle lazy compound values and symbolic values. 2330 if (Optional<nonloc::LazyCompoundVal> LCV = 2331 V.getAs<nonloc::LazyCompoundVal>()) { 2332 if (Optional<RegionBindingsRef> NewB = tryBindSmallStruct(B, R, RD, *LCV)) 2333 return *NewB; 2334 return bindAggregate(B, R, V); 2335 } 2336 if (V.getAs<nonloc::SymbolVal>()) 2337 return bindAggregate(B, R, V); 2338 2339 // We may get non-CompoundVal accidentally due to imprecise cast logic or 2340 // that we are binding symbolic struct value. Kill the field values, and if 2341 // the value is symbolic go and bind it as a "default" binding. 
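  // For illustration, with analyzed code such as (purely illustrative):
  //
  //   struct Point { int x, y; };
  //   struct Point p = {1, 2};
  //
  // the initializer arrives as a CompoundVal and each field region of 'p' is
  // bound in the field loop below.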
2342   if (V.isUnknown() || !V.getAs<nonloc::CompoundVal>())
2343     return bindAggregate(B, R, UnknownVal());
2344
2345   const nonloc::CompoundVal& CV = V.castAs<nonloc::CompoundVal>();
2346   nonloc::CompoundVal::iterator VI = CV.begin(), VE = CV.end();
2347
2348   RecordDecl::field_iterator FI, FE;
2349   RegionBindingsRef NewB(B);
2350
2351   for (FI = RD->field_begin(), FE = RD->field_end(); FI != FE; ++FI) {
2352
2353     if (VI == VE)
2354       break;
2355
2356     // Skip any unnamed bitfields to stay in sync with the initializers.
2357     if (FI->isUnnamedBitfield())
2358       continue;
2359
2360     QualType FTy = FI->getType();
2361     const FieldRegion* FR = MRMgr.getFieldRegion(*FI, R);
2362
2363     if (FTy->isArrayType())
2364       NewB = bindArray(NewB, FR, *VI);
2365     else if (FTy->isStructureOrClassType())
2366       NewB = bindStruct(NewB, FR, *VI);
2367     else
2368       NewB = bind(NewB, loc::MemRegionVal(FR), *VI);
2369     ++VI;
2370   }
2371
2372   // The initializer list may have fewer values than the struct has fields.
2373   if (FI != FE) {
2374     NewB = NewB.addBinding(R, BindingKey::Default,
2375                            svalBuilder.makeIntVal(0, false));
2376   }
2377
2378   return NewB;
2379 }
2380
2381 RegionBindingsRef
2382 RegionStoreManager::bindAggregate(RegionBindingsConstRef B,
2383                                   const TypedRegion *R,
2384                                   SVal Val) {
2385   // Remove the old bindings, using 'R' as the root of all regions
2386   // we will invalidate. Then add the new binding.
2387   return removeSubRegionBindings(B, R).addBinding(R, BindingKey::Default, Val);
2388 }
2389
2390 //===----------------------------------------------------------------------===//
2391 // State pruning.
2392 //===----------------------------------------------------------------------===//
2393
2394 namespace {
2395 class RemoveDeadBindingsWorker
2396     : public ClusterAnalysis<RemoveDeadBindingsWorker> {
2397   using ChildrenListTy = SmallVector<const SymbolDerived *, 4>;
2398   using MapParentsToDerivedTy = llvm::DenseMap<SymbolRef, ChildrenListTy>;
2399
2400   MapParentsToDerivedTy ParentsToDerived;
2401   SymbolReaper &SymReaper;
2402   const StackFrameContext *CurrentLCtx;
2403
2404 public:
2405   RemoveDeadBindingsWorker(RegionStoreManager &rm,
2406                            ProgramStateManager &stateMgr,
2407                            RegionBindingsRef b, SymbolReaper &symReaper,
2408                            const StackFrameContext *LCtx)
2409       : ClusterAnalysis<RemoveDeadBindingsWorker>(rm, stateMgr, b),
2410         SymReaper(symReaper), CurrentLCtx(LCtx) {}
2411
2412   // Called by ClusterAnalysis.
2413 void VisitAddedToCluster(const MemRegion *baseR, const ClusterBindings &C); 2414 void VisitCluster(const MemRegion *baseR, const ClusterBindings *C); 2415 using ClusterAnalysis<RemoveDeadBindingsWorker>::VisitCluster; 2416 2417 using ClusterAnalysis::AddToWorkList; 2418 2419 bool AddToWorkList(const MemRegion *R); 2420 2421 void VisitBinding(SVal V); 2422 2423 private: 2424 void populateWorklistFromSymbol(SymbolRef s); 2425 }; 2426 } 2427 2428 bool RemoveDeadBindingsWorker::AddToWorkList(const MemRegion *R) { 2429 const MemRegion *BaseR = R->getBaseRegion(); 2430 return AddToWorkList(WorkListElement(BaseR), getCluster(BaseR)); 2431 } 2432 2433 void RemoveDeadBindingsWorker::VisitAddedToCluster(const MemRegion *baseR, 2434 const ClusterBindings &C) { 2435 2436 if (const VarRegion *VR = dyn_cast<VarRegion>(baseR)) { 2437 if (SymReaper.isLive(VR)) 2438 AddToWorkList(baseR, &C); 2439 2440 return; 2441 } 2442 2443 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(baseR)) { 2444 if (SymReaper.isLive(SR->getSymbol())) { 2445 AddToWorkList(SR, &C); 2446 } else if (const auto *SD = dyn_cast<SymbolDerived>(SR->getSymbol())) { 2447 ParentsToDerived[SD->getParentSymbol()].push_back(SD); 2448 } 2449 2450 return; 2451 } 2452 2453 if (isa<NonStaticGlobalSpaceRegion>(baseR)) { 2454 AddToWorkList(baseR, &C); 2455 return; 2456 } 2457 2458 // CXXThisRegion in the current or parent location context is live. 2459 if (const CXXThisRegion *TR = dyn_cast<CXXThisRegion>(baseR)) { 2460 const auto *StackReg = 2461 cast<StackArgumentsSpaceRegion>(TR->getSuperRegion()); 2462 const StackFrameContext *RegCtx = StackReg->getStackFrame(); 2463 if (CurrentLCtx && 2464 (RegCtx == CurrentLCtx || RegCtx->isParentOf(CurrentLCtx))) 2465 AddToWorkList(TR, &C); 2466 } 2467 } 2468 2469 void RemoveDeadBindingsWorker::VisitCluster(const MemRegion *baseR, 2470 const ClusterBindings *C) { 2471 if (!C) 2472 return; 2473 2474 // Mark the symbol for any SymbolicRegion with live bindings as live itself. 2475 // This means we should continue to track that symbol. 2476 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(baseR)) 2477 SymReaper.markLive(SymR->getSymbol()); 2478 2479 for (ClusterBindings::iterator I = C->begin(), E = C->end(); I != E; ++I) { 2480 // Element index of a binding key is live. 2481 SymReaper.markElementIndicesLive(I.getKey().getRegion()); 2482 2483 VisitBinding(I.getData()); 2484 } 2485 } 2486 2487 void RemoveDeadBindingsWorker::VisitBinding(SVal V) { 2488 // Is it a LazyCompoundVal? All referenced regions are live as well. 2489 if (Optional<nonloc::LazyCompoundVal> LCS = 2490 V.getAs<nonloc::LazyCompoundVal>()) { 2491 2492 const RegionStoreManager::SValListTy &Vals = RM.getInterestingValues(*LCS); 2493 2494 for (RegionStoreManager::SValListTy::const_iterator I = Vals.begin(), 2495 E = Vals.end(); 2496 I != E; ++I) 2497 VisitBinding(*I); 2498 2499 return; 2500 } 2501 2502 // If V is a region, then add it to the worklist. 2503 if (const MemRegion *R = V.getAsRegion()) { 2504 AddToWorkList(R); 2505 2506 if (const auto *TVR = dyn_cast<TypedValueRegion>(R)) { 2507 DefinedOrUnknownSVal RVS = 2508 RM.getSValBuilder().getRegionValueSymbolVal(TVR); 2509 if (const MemRegion *SR = RVS.getAsRegion()) { 2510 AddToWorkList(SR); 2511 } 2512 } 2513 2514 SymReaper.markLive(R); 2515 2516 // All regions captured by a block are also live. 
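    // For illustration (purely illustrative):
    //
    //   int x = 0;
    //   void (^blk)(void) = ^{ use(x); };
    //
    // While the block region for 'blk' is live, each captured region (here,
    // the one for 'x') is added to the worklist and kept live as well.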
2517 if (const BlockDataRegion *BR = dyn_cast<BlockDataRegion>(R)) { 2518 BlockDataRegion::referenced_vars_iterator I = BR->referenced_vars_begin(), 2519 E = BR->referenced_vars_end(); 2520 for ( ; I != E; ++I) 2521 AddToWorkList(I.getCapturedRegion()); 2522 } 2523 } 2524 2525 2526 // Update the set of live symbols. 2527 for (auto SI = V.symbol_begin(), SE = V.symbol_end(); SI != SE; ++SI) { 2528 populateWorklistFromSymbol(*SI); 2529 2530 for (const auto *SD : ParentsToDerived[*SI]) 2531 populateWorklistFromSymbol(SD); 2532 2533 SymReaper.markLive(*SI); 2534 } 2535 } 2536 2537 void RemoveDeadBindingsWorker::populateWorklistFromSymbol(SymbolRef S) { 2538 if (const auto *SD = dyn_cast<SymbolData>(S)) { 2539 if (Loc::isLocType(SD->getType()) && !SymReaper.isLive(SD)) { 2540 const SymbolicRegion *SR = RM.getRegionManager().getSymbolicRegion(SD); 2541 2542 if (B.contains(SR)) 2543 AddToWorkList(SR); 2544 2545 const SymbolicRegion *SHR = 2546 RM.getRegionManager().getSymbolicHeapRegion(SD); 2547 if (B.contains(SHR)) 2548 AddToWorkList(SHR); 2549 } 2550 } 2551 } 2552 2553 StoreRef RegionStoreManager::removeDeadBindings(Store store, 2554 const StackFrameContext *LCtx, 2555 SymbolReaper& SymReaper) { 2556 RegionBindingsRef B = getRegionBindings(store); 2557 RemoveDeadBindingsWorker W(*this, StateMgr, B, SymReaper, LCtx); 2558 W.GenerateClusters(); 2559 2560 // Enqueue the region roots onto the worklist. 2561 for (SymbolReaper::region_iterator I = SymReaper.region_begin(), 2562 E = SymReaper.region_end(); I != E; ++I) { 2563 W.AddToWorkList(*I); 2564 } 2565 2566 W.RunWorkList(); 2567 2568 // We have now scanned the store, marking reachable regions and symbols 2569 // as live. We now remove all the regions that are dead from the store 2570 // as well as update DSymbols with the set symbols that are now dead. 2571 for (RegionBindingsRef::iterator I = B.begin(), E = B.end(); I != E; ++I) { 2572 const MemRegion *Base = I.getKey(); 2573 2574 // If the cluster has been visited, we know the region has been marked. 2575 if (W.isVisited(Base)) 2576 continue; 2577 2578 // Remove the dead entry. 2579 B = B.remove(Base); 2580 2581 if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(Base)) 2582 SymReaper.maybeDead(SymR->getSymbol()); 2583 2584 // Mark all non-live symbols that this binding references as dead. 2585 const ClusterBindings &Cluster = I.getData(); 2586 for (ClusterBindings::iterator CI = Cluster.begin(), CE = Cluster.end(); 2587 CI != CE; ++CI) { 2588 SVal X = CI.getData(); 2589 SymExpr::symbol_iterator SI = X.symbol_begin(), SE = X.symbol_end(); 2590 for (; SI != SE; ++SI) 2591 SymReaper.maybeDead(*SI); 2592 } 2593 } 2594 2595 return StoreRef(B.asStore(), *this); 2596 } 2597 2598 //===----------------------------------------------------------------------===// 2599 // Utility methods. 2600 //===----------------------------------------------------------------------===// 2601 2602 void RegionStoreManager::print(Store store, raw_ostream &OS, 2603 const char* nl, const char *sep) { 2604 RegionBindingsRef B = getRegionBindings(store); 2605 OS << "Store (direct and default bindings), " 2606 << B.asStore() 2607 << " :" << nl; 2608 B.dump(OS, nl); 2609 } 2610