1 //===- MemRegion.cpp - Abstract memory regions for static analysis --------===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file defines MemRegion and its subclasses. MemRegion defines a 11 // partially-typed abstraction of memory useful for path-sensitive dataflow 12 // analyses. 13 // 14 //===----------------------------------------------------------------------===// 15 16 #include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h" 17 #include "clang/AST/ASTContext.h" 18 #include "clang/AST/Attr.h" 19 #include "clang/AST/CharUnits.h" 20 #include "clang/AST/Decl.h" 21 #include "clang/AST/DeclCXX.h" 22 #include "clang/AST/DeclObjC.h" 23 #include "clang/AST/Expr.h" 24 #include "clang/AST/PrettyPrinter.h" 25 #include "clang/AST/RecordLayout.h" 26 #include "clang/AST/Type.h" 27 #include "clang/Analysis/AnalysisDeclContext.h" 28 #include "clang/Analysis/Support/BumpVector.h" 29 #include "clang/Basic/IdentifierTable.h" 30 #include "clang/Basic/LLVM.h" 31 #include "clang/Basic/SourceManager.h" 32 #include "clang/StaticAnalyzer/Core/PathSensitive/SValBuilder.h" 33 #include "clang/StaticAnalyzer/Core/PathSensitive/SVals.h" 34 #include "clang/StaticAnalyzer/Core/PathSensitive/SymbolManager.h" 35 #include "llvm/ADT/APInt.h" 36 #include "llvm/ADT/FoldingSet.h" 37 #include "llvm/ADT/Optional.h" 38 #include "llvm/ADT/PointerUnion.h" 39 #include "llvm/ADT/SmallString.h" 40 #include "llvm/ADT/StringRef.h" 41 #include "llvm/ADT/Twine.h" 42 #include "llvm/Support/Allocator.h" 43 #include "llvm/Support/Casting.h" 44 #include "llvm/Support/Compiler.h" 45 #include "llvm/Support/Debug.h" 46 #include "llvm/Support/ErrorHandling.h" 47 #include "llvm/Support/raw_ostream.h" 48 #include <cassert> 49 #include <cstdint> 50 #include <functional> 51 #include 
#include <iterator>
#include <string>
#include <tuple>
#include <utility>

using namespace clang;
using namespace ento;

#define DEBUG_TYPE "MemRegion"

//===----------------------------------------------------------------------===//
// MemRegion Construction.
//===----------------------------------------------------------------------===//

// The getSubRegion overloads below implement the uniquing scheme shared by
// all region factories: a candidate region's identity is hashed into a
// FoldingSetNodeID via the region class's static ProfileRegion, looked up in
// 'Regions', and only allocated (from the BumpPtrAllocator 'A') on a miss.
// Consequently, pointer equality of MemRegions implies structural equality.

template <typename RegionTy, typename SuperTy, typename Arg1Ty>
RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1,
                                         const SuperTy *superRegion) {
  llvm::FoldingSetNodeID ID;
  RegionTy::ProfileRegion(ID, arg1, superRegion);
  void *InsertPos;
  auto *R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, InsertPos));

  if (!R) {
    // Cache miss: bump-allocate, placement-construct, then register the node
    // at the insertion point computed by the failed lookup.
    R = A.Allocate<RegionTy>();
    new (R) RegionTy(arg1, superRegion);
    Regions.InsertNode(R, InsertPos);
  }

  return R;
}

// Two-constructor-argument variant of the uniquing factory above.
template <typename RegionTy, typename SuperTy, typename Arg1Ty, typename Arg2Ty>
RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1, const Arg2Ty arg2,
                                         const SuperTy *superRegion) {
  llvm::FoldingSetNodeID ID;
  RegionTy::ProfileRegion(ID, arg1, arg2, superRegion);
  void *InsertPos;
  auto *R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, InsertPos));

  if (!R) {
    R = A.Allocate<RegionTy>();
    new (R) RegionTy(arg1, arg2, superRegion);
    Regions.InsertNode(R, InsertPos);
  }

  return R;
}

// Three-constructor-argument variant of the uniquing factory above.
template <typename RegionTy, typename SuperTy,
          typename Arg1Ty, typename Arg2Ty, typename Arg3Ty>
RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1, const Arg2Ty arg2,
                                         const Arg3Ty arg3,
                                         const SuperTy *superRegion) {
  llvm::FoldingSetNodeID ID;
  RegionTy::ProfileRegion(ID, arg1, arg2, arg3, superRegion);
  void *InsertPos;
  auto *R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, InsertPos));

  if (!R) {
    R = A.Allocate<RegionTy>();
    new (R) RegionTy(arg1, arg2, arg3, superRegion);
    Regions.InsertNode(R, InsertPos);
  }

  return R;
}

//===----------------------------------------------------------------------===//
// Object destruction.
//===----------------------------------------------------------------------===//

MemRegion::~MemRegion() = default;

// All regions and their data are BumpPtrAllocated. No need to call their
// destructors.
MemRegionManager::~MemRegionManager() = default;

//===----------------------------------------------------------------------===//
// Basic methods.
//===----------------------------------------------------------------------===//

// Returns true if 'R' is reachable from this region by walking the
// super-region chain; a region counts as a sub-region of itself.
bool SubRegion::isSubRegionOf(const MemRegion* R) const {
  const MemRegion* r = this;
  do {
    if (r == R)
      return true;
    if (const auto *sr = dyn_cast<SubRegion>(r))
      r = sr->getSuperRegion();
    else
      break; // Reached a region with no super-region (a memory space).
  } while (r != nullptr);
  return false;
}

// Walks up the super-region chain until the first non-SubRegion ancestor
// (a memory-space region); that root knows the owning MemRegionManager.
MemRegionManager* SubRegion::getMemRegionManager() const {
  const SubRegion* r = this;
  do {
    const MemRegion *superRegion = r->getSuperRegion();
    if (const auto *sr = dyn_cast<SubRegion>(superRegion)) {
      r = sr;
      continue;
    }
    return superRegion->getMemRegionManager();
  } while (true);
}

// Returns the stack frame this variable lives in, or null when its memory
// space is not a stack space (e.g. globals, unknown space).
const StackFrameContext *VarRegion::getStackFrame() const {
  const auto *SSR = dyn_cast<StackSpaceRegion>(getMemorySpace());
  return SSR ? SSR->getStackFrame() : nullptr;
}

//===----------------------------------------------------------------------===//
// Region extents.
//===----------------------------------------------------------------------===//

// Computes the size in bytes of this region's value type: symbolic for VLAs
// (size is only known at run time), unknown for incomplete types, and a
// concrete integer (in the array-index type) otherwise.
DefinedOrUnknownSVal TypedValueRegion::getExtent(SValBuilder &svalBuilder) const {
  ASTContext &Ctx = svalBuilder.getContext();
  QualType T = getDesugaredValueType(Ctx);

  if (isa<VariableArrayType>(T))
    return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this));
  if (T->isIncompleteType())
    return UnknownVal();

  CharUnits size = Ctx.getTypeSizeInChars(T);
  QualType sizeTy = svalBuilder.getArrayIndexType();
  return svalBuilder.makeIntVal(size.getQuantity(), sizeTy);
}

DefinedOrUnknownSVal FieldRegion::getExtent(SValBuilder &svalBuilder) const {
  // Force callers to deal with bitfields explicitly.
  if (getDecl()->isBitField())
    return UnknownVal();

  DefinedOrUnknownSVal Extent = DeclRegion::getExtent(svalBuilder);

  // A zero-length array at the end of a struct often stands for dynamically-
  // allocated extra memory.
  if (Extent.isZeroConstant()) {
    QualType T = getDesugaredValueType(svalBuilder.getContext());

    if (isa<ConstantArrayType>(T))
      return UnknownVal();
  }

  return Extent;
}

// alloca() has no statically-known size; model the extent symbolically.
DefinedOrUnknownSVal AllocaRegion::getExtent(SValBuilder &svalBuilder) const {
  return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this));
}

// The pointee of an arbitrary symbolic pointer has a symbolic extent as well.
DefinedOrUnknownSVal SymbolicRegion::getExtent(SValBuilder &svalBuilder) const {
  return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this));
}

// String literal extent: byte length plus one for the null terminator.
DefinedOrUnknownSVal StringRegion::getExtent(SValBuilder &svalBuilder) const {
  return svalBuilder.makeIntVal(getStringLiteral()->getByteLength()+1,
                                svalBuilder.getArrayIndexType());
}

ObjCIvarRegion::ObjCIvarRegion(const ObjCIvarDecl *ivd, const SubRegion *sReg)
    : DeclRegion(ivd, sReg, ObjCIvarRegionKind) {}

// Narrows the stored Decl (field 'D' of DeclRegion) back to the ivar decl.
const ObjCIvarDecl *ObjCIvarRegion::getDecl() const {
  return cast<ObjCIvarDecl>(D);
}

QualType ObjCIvarRegion::getValueType() const {
  return getDecl()->getType();
}

// The value type of a base-object region is the base class's record type.
QualType CXXBaseObjectRegion::getValueType() const {
  return QualType(getDecl()->getTypeForDecl(), 0);
}

//===----------------------------------------------------------------------===//
// FoldingSet profiling.
//===----------------------------------------------------------------------===//

// The Profile/ProfileRegion methods below define region *identity*: two
// regions are the same object iff they hash identically here.  The static
// ProfileRegion overloads let MemRegionManager::getSubRegion compute the
// hash before a region object exists.

void MemSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
}

void StackSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
  ID.AddPointer(getStackFrame());
}

void StaticGlobalSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
  ID.AddPointer(getCodeRegion());
}

void StringRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                 const StringLiteral *Str,
                                 const MemRegion *superRegion) {
  ID.AddInteger(static_cast<unsigned>(StringRegionKind));
  ID.AddPointer(Str);
  ID.AddPointer(superRegion);
}

void ObjCStringRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                     const ObjCStringLiteral *Str,
                                     const MemRegion *superRegion) {
  ID.AddInteger(static_cast<unsigned>(ObjCStringRegionKind));
  ID.AddPointer(Str);
  ID.AddPointer(superRegion);
}

void AllocaRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                 const Expr *Ex, unsigned cnt,
                                 const MemRegion *superRegion) {
  ID.AddInteger(static_cast<unsigned>(AllocaRegionKind));
  ID.AddPointer(Ex);
  // 'cnt' distinguishes multiple allocas from the same expression site
  // (e.g. in a loop or on different paths).
  ID.AddInteger(cnt);
  ID.AddPointer(superRegion);
}

void AllocaRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  ProfileRegion(ID, Ex, Cnt, superRegion);
}

void CompoundLiteralRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  CompoundLiteralRegion::ProfileRegion(ID, CL, superRegion);
}

void CompoundLiteralRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                          const CompoundLiteralExpr *CL,
                                          const MemRegion* superRegion) {
  ID.AddInteger(static_cast<unsigned>(CompoundLiteralRegionKind));
  ID.AddPointer(CL);
  ID.AddPointer(superRegion);
}

void CXXThisRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                  const PointerType *PT,
                                  const MemRegion *sRegion) {
  ID.AddInteger(static_cast<unsigned>(CXXThisRegionKind));
  ID.AddPointer(PT);
  ID.AddPointer(sRegion);
}

void CXXThisRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  CXXThisRegion::ProfileRegion(ID, ThisPointerTy, superRegion);
}

// Ivar regions delegate to the generic DeclRegion profile with their kind tag.
void ObjCIvarRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                   const ObjCIvarDecl *ivd,
                                   const MemRegion* superRegion) {
  DeclRegion::ProfileRegion(ID, ivd, superRegion, ObjCIvarRegionKind);
}

void DeclRegion::ProfileRegion(llvm::FoldingSetNodeID& ID, const Decl *D,
                               const MemRegion* superRegion, Kind k) {
  ID.AddInteger(static_cast<unsigned>(k));
  ID.AddPointer(D);
  ID.AddPointer(superRegion);
}

void DeclRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  DeclRegion::ProfileRegion(ID, D, superRegion, getKind());
}

void VarRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  VarRegion::ProfileRegion(ID, getDecl(), superRegion);
}

void SymbolicRegion::ProfileRegion(llvm::FoldingSetNodeID& ID, SymbolRef sym,
                                   const MemRegion *sreg) {
  ID.AddInteger(static_cast<unsigned>(MemRegion::SymbolicRegionKind));
  ID.Add(sym);
  ID.AddPointer(sreg);
}

void SymbolicRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  SymbolicRegion::ProfileRegion(ID, sym, getSuperRegion());
}

void ElementRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                  QualType ElementType, SVal Idx,
                                  const MemRegion* superRegion) {
  ID.AddInteger(MemRegion::ElementRegionKind);
  ID.Add(ElementType);
  ID.AddPointer(superRegion);
  Idx.Profile(ID);
}

void ElementRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  ElementRegion::ProfileRegion(ID, ElementType, Index, superRegion);
}

// Code regions are uniqued by declaration alone; the super region (the code
// space) is intentionally not part of the profile.
void FunctionCodeRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                       const NamedDecl *FD,
                                       const MemRegion*) {
  ID.AddInteger(MemRegion::FunctionCodeRegionKind);
  ID.AddPointer(FD);
}

void FunctionCodeRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  FunctionCodeRegion::ProfileRegion(ID, FD, superRegion);
}

// Uniqued by the BlockDecl only; the block type and analysis context are
// ignored for identity purposes.
void BlockCodeRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                    const BlockDecl *BD, CanQualType,
                                    const AnalysisDeclContext *AC,
                                    const MemRegion*) {
  ID.AddInteger(MemRegion::BlockCodeRegionKind);
  ID.AddPointer(BD);
}

void BlockCodeRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  BlockCodeRegion::ProfileRegion(ID, BD, locTy, AC, superRegion);
}

void BlockDataRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                    const BlockCodeRegion *BC,
                                    const LocationContext *LC,
                                    unsigned BlkCount,
                                    const MemRegion *sReg) {
  ID.AddInteger(MemRegion::BlockDataRegionKind);
  ID.AddPointer(BC);
  ID.AddPointer(LC);
  ID.AddInteger(BlkCount);
  ID.AddPointer(sReg);
}

void BlockDataRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  BlockDataRegion::ProfileRegion(ID, BC, LC, BlockCount, getSuperRegion());
}

// NOTE(review): unlike the profiles above, this one does not fold in a kind
// tag; identity rests on the expression/super-region pair alone — confirm
// this cannot collide with another kind sharing the same profile shape.
void CXXTempObjectRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                        Expr const *Ex,
                                        const MemRegion *sReg) {
  ID.AddPointer(Ex);
  ID.AddPointer(sReg);
}

void CXXTempObjectRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ProfileRegion(ID, Ex, getSuperRegion());
}

// NOTE(review): also omits a kind tag; see the note above CXXTempObjectRegion.
void CXXBaseObjectRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                        const CXXRecordDecl *RD,
                                        bool IsVirtual,
                                        const MemRegion *SReg) {
  ID.AddPointer(RD);
  ID.AddBoolean(IsVirtual);
  ID.AddPointer(SReg);
}

void CXXBaseObjectRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ProfileRegion(ID, getDecl(), isVirtual(), superRegion);
}

//===----------------------------------------------------------------------===//
// Region anchors.
//===----------------------------------------------------------------------===//

// Out-of-line anchors pin each class's vtable to this translation unit.

void GlobalsSpaceRegion::anchor() {}

void NonStaticGlobalSpaceRegion::anchor() {}

void StackSpaceRegion::anchor() {}

void TypedRegion::anchor() {}

void TypedValueRegion::anchor() {}

void CodeTextRegion::anchor() {}

void SubRegion::anchor() {}

//===----------------------------------------------------------------------===//
// Region pretty-printing.
//===----------------------------------------------------------------------===//

LLVM_DUMP_METHOD void MemRegion::dump() const {
  dumpToStream(llvm::errs());
}

// Renders the region into a freshly-built std::string via dumpToStream.
std::string MemRegion::getString() const {
  std::string s;
  llvm::raw_string_ostream os(s);
  dumpToStream(os);
  return os.str();
}

// Fallback printer for region classes that do not override dumpToStream.
void MemRegion::dumpToStream(raw_ostream &os) const {
  os << "<Unknown Region>";
}

void AllocaRegion::dumpToStream(raw_ostream &os) const {
  os << "alloca{" << static_cast<const void *>(Ex) << ',' << Cnt << '}';
}

void FunctionCodeRegion::dumpToStream(raw_ostream &os) const {
  os << "code{" << getDecl()->getDeclName().getAsString() << '}';
}

// Blocks are anonymous, so print the region's own address as its identity.
void BlockCodeRegion::dumpToStream(raw_ostream &os) const {
  os << "block_code{" << static_cast<const void *>(this) << '}';
}

// Prints the code region plus each (captured <- original) variable pair.
void BlockDataRegion::dumpToStream(raw_ostream &os) const {
  os << "block_data{" << BC;
  os << "; ";
  for (BlockDataRegion::referenced_vars_iterator
         I = referenced_vars_begin(),
         E = referenced_vars_end(); I != E; ++I)
    os << "(" << I.getCapturedRegion() << "<-" <<
                 I.getOriginalRegion() << ") ";
  os << '}';
}

void CompoundLiteralRegion::dumpToStream(raw_ostream &os) const {
  // FIXME: More elaborate pretty-printing.
  os << "{ " << static_cast<const void *>(CL) << " }";
}

void CXXTempObjectRegion::dumpToStream(raw_ostream &os) const {
  os << "temp_object{" << getValueType().getAsString() << ','
     << static_cast<const void *>(Ex) << '}';
}

void CXXBaseObjectRegion::dumpToStream(raw_ostream &os) const {
  os << "base{" << superRegion << ',' << getDecl()->getName() << '}';
}

void CXXThisRegion::dumpToStream(raw_ostream &os) const {
  os << "this";
}

void ElementRegion::dumpToStream(raw_ostream &os) const {
  os << "element{" << superRegion << ','
     << Index << ',' << getElementType().getAsString() << '}';
}

void FieldRegion::dumpToStream(raw_ostream &os) const {
  os << superRegion << "->" << *getDecl();
}

void ObjCIvarRegion::dumpToStream(raw_ostream &os) const {
  os << "ivar{" << superRegion << ',' << *getDecl() << '}';
}

void StringRegion::dumpToStream(raw_ostream &os) const {
  assert(Str != nullptr && "Expecting non-null StringLiteral");
  Str->printPretty(os, nullptr, PrintingPolicy(getContext().getLangOpts()));
}

void ObjCStringRegion::dumpToStream(raw_ostream &os) const {
  assert(Str != nullptr && "Expecting non-null ObjCStringLiteral");
  Str->printPretty(os, nullptr, PrintingPolicy(getContext().getLangOpts()));
}

// Heap-based symbolic regions print as "HeapSymRegion{...}"; the "Heap"
// prefix is emitted conditionally in front of the common form.
void SymbolicRegion::dumpToStream(raw_ostream &os) const {
  if (isa<HeapSpaceRegion>(getSuperRegion()))
    os << "Heap";
  os << "SymRegion{" << sym << '}';
}

// Named variables print as their identifier; unnamed ones (e.g. unnamed
// parameters) fall back to the region's address.
void VarRegion::dumpToStream(raw_ostream &os) const {
  const auto *VD = cast<VarDecl>(D);
  if (const IdentifierInfo *ID = VD->getIdentifier())
    os << ID->getName();
  else
    os << "VarRegion{" << static_cast<const void *>(this) << '}';
}

LLVM_DUMP_METHOD void RegionRawOffset::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffset::dumpToStream(raw_ostream &os) const {
  os << "raw_offset{" << getRegion() << ',' << getOffset().getQuantity() << '}';
}

void CodeSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "CodeSpaceRegion";
}

void StaticGlobalSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StaticGlobalsMemSpace{" << CR << '}';
}

void GlobalInternalSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalInternalSpaceRegion";
}

void GlobalSystemSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalSystemSpaceRegion";
}

void GlobalImmutableSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalImmutableSpaceRegion";
}

void HeapSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "HeapSpaceRegion";
}

void UnknownSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "UnknownSpaceRegion";
}

void StackArgumentsSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StackArgumentsSpaceRegion";
}

void StackLocalsSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StackLocalsSpaceRegion";
}

// By default a region is pretty-printable iff it can be rendered as a
// source-level expression; subclasses may loosen that (see FieldRegion).
bool MemRegion::canPrintPretty() const {
  return canPrintPrettyAsExpr();
}

bool MemRegion::canPrintPrettyAsExpr() const {
  return false;
}

// Quotes the expression form; callers must check canPrintPretty() first.
void MemRegion::printPretty(raw_ostream &os) const {
  assert(canPrintPretty() && "This region cannot be printed pretty.");
  os << "'";
  printPrettyAsExpr(os);
  os << "'";
}

void MemRegion::printPrettyAsExpr(raw_ostream &os) const {
  llvm_unreachable("This region cannot be printed pretty.");
}

bool VarRegion::canPrintPrettyAsExpr() const {
  return true;
}

void VarRegion::printPrettyAsExpr(raw_ostream &os) const {
  os << getDecl()->getName();
}

bool ObjCIvarRegion::canPrintPrettyAsExpr() const {
  return true;
}

void ObjCIvarRegion::printPrettyAsExpr(raw_ostream &os) const {
  os << getDecl()->getName();
}
// A field is always printable: either as "base.field" (when the base is an
// expression) or as the bare field name (see printPretty below).
bool FieldRegion::canPrintPretty() const {
  return true;
}

bool FieldRegion::canPrintPrettyAsExpr() const {
  return superRegion->canPrintPrettyAsExpr();
}

void FieldRegion::printPrettyAsExpr(raw_ostream &os) const {
  assert(canPrintPrettyAsExpr());
  superRegion->printPrettyAsExpr(os);
  os << "." << getDecl()->getName();
}

void FieldRegion::printPretty(raw_ostream &os) const {
  if (canPrintPrettyAsExpr()) {
    os << "\'";
    printPrettyAsExpr(os);
    os << "'";
  } else {
    // Base is not expressible; fall back to naming just the field.
    os << "field " << "\'" << getDecl()->getName() << "'";
  }
}

bool CXXBaseObjectRegion::canPrintPrettyAsExpr() const {
  return superRegion->canPrintPrettyAsExpr();
}

// A base-object region prints as the underlying object's expression; the
// base-class adjustment itself has no source-level spelling.
void CXXBaseObjectRegion::printPrettyAsExpr(raw_ostream &os) const {
  superRegion->printPrettyAsExpr(os);
}

// Builds a human-readable name such as "'arr[0][i]'" by peeling off
// ElementRegion layers (collecting index strings) and then pretty-printing
// whatever base region remains.  Returns an empty string when the base is
// not expressible.
std::string MemRegion::getDescriptiveName(bool UseQuotes) const {
  std::string VariableName;
  std::string ArrayIndices;
  const MemRegion *R = this;
  SmallString<50> buf;
  llvm::raw_svector_ostream os(buf);

  // Obtain array indices to add them to the variable name.
  const ElementRegion *ER = nullptr;
  while ((ER = R->getAs<ElementRegion>())) {
    // Index is a ConcreteInt.
    if (auto CI = ER->getIndex().getAs<nonloc::ConcreteInt>()) {
      llvm::SmallString<2> Idx;
      CI->getValue().toString(Idx);
      ArrayIndices = (llvm::Twine("[") + Idx.str() + "]" + ArrayIndices).str();
    }
    // If not a ConcreteInt, try to obtain the variable
    // name by calling 'getDescriptiveName' recursively.
    else {
      std::string Idx = ER->getDescriptiveName(false);
      if (!Idx.empty()) {
        ArrayIndices = (llvm::Twine("[") + Idx + "]" + ArrayIndices).str();
      }
    }
    R = ER->getSuperRegion();
  }

  // Get variable name.
  if (R && R->canPrintPrettyAsExpr()) {
    R->printPrettyAsExpr(os);
    if (UseQuotes)
      return (llvm::Twine("'") + os.str() + ArrayIndices + "'").str();
    else
      return (llvm::Twine(os.str()) + ArrayIndices).str();
  }

  return VariableName;
}

// Best-effort source range for diagnostics: a FieldRegion maps to its field
// declaration, a variable-based region to its variable declaration, anything
// else yields an invalid (default) range the caller must check.
SourceRange MemRegion::sourceRange() const {
  const auto *const VR = dyn_cast<VarRegion>(this->getBaseRegion());
  const auto *const FR = dyn_cast<FieldRegion>(this);

  // Check for more specific regions first.
  // FieldRegion
  if (FR) {
    return FR->getDecl()->getSourceRange();
  }
  // VarRegion
  else if (VR) {
    return VR->getDecl()->getSourceRange();
  }
  // Return invalid source range (can be checked by client).
  else
    return {};
}

//===----------------------------------------------------------------------===//
// MemRegionManager methods.
//===----------------------------------------------------------------------===//

// Allocates the singleton memory-space region on first use and caches it in
// the manager-owned slot passed by reference.
template <typename REG>
const REG *MemRegionManager::LazyAllocate(REG*& region) {
  if (!region) {
    region = A.Allocate<REG>();
    new (region) REG(this);
  }

  return region;
}

// One-argument variant of LazyAllocate for spaces parameterized by context.
template <typename REG, typename ARG>
const REG *MemRegionManager::LazyAllocate(REG*& region, ARG a) {
  if (!region) {
    region = A.Allocate<REG>();
    new (region) REG(this, a);
  }

  return region;
}

// Stack-locals space is unique per stack frame; cached in a DenseMap.
const StackLocalsSpaceRegion*
MemRegionManager::getStackLocalsRegion(const StackFrameContext *STC) {
  assert(STC);
  StackLocalsSpaceRegion *&R = StackLocalsSpaceRegions[STC];

  if (R)
    return R;

  R = A.Allocate<StackLocalsSpaceRegion>();
  new (R) StackLocalsSpaceRegion(this, STC);
  return R;
}

// Stack-arguments space is likewise unique per stack frame.
const StackArgumentsSpaceRegion *
MemRegionManager::getStackArgumentsRegion(const StackFrameContext *STC) {
  assert(STC);
  StackArgumentsSpaceRegion *&R = StackArgumentsSpaceRegions[STC];

  if (R)
    return R;

  R = A.Allocate<StackArgumentsSpaceRegion>();
  new (R) StackArgumentsSpaceRegion(this, STC);
  return R;
}

// Dispatches to the right globals space: without a code region, one of the
// three singleton global spaces; with one, the per-function static space.
const GlobalsSpaceRegion
*MemRegionManager::getGlobalsRegion(MemRegion::Kind K,
                                    const CodeTextRegion *CR) {
  if (!CR) {
    if (K == MemRegion::GlobalSystemSpaceRegionKind)
      return LazyAllocate(SystemGlobals);
    if (K == MemRegion::GlobalImmutableSpaceRegionKind)
      return LazyAllocate(ImmutableGlobals);
    assert(K == MemRegion::GlobalInternalSpaceRegionKind);
    return LazyAllocate(InternalGlobals);
  }

  assert(K == MemRegion::StaticGlobalSpaceRegionKind);
  StaticGlobalSpaceRegion *&R = StaticsGlobalSpaceRegions[CR];
  if (R)
    return R;

  R = A.Allocate<StaticGlobalSpaceRegion>();
  new (R) StaticGlobalSpaceRegion(this, CR);
  return R;
}

const HeapSpaceRegion *MemRegionManager::getHeapRegion() {
  return LazyAllocate(heap);
}

const UnknownSpaceRegion *MemRegionManager::getUnknownRegion() {
  return LazyAllocate(unknown);
}

const CodeSpaceRegion *MemRegionManager::getCodeRegion() {
  return LazyAllocate(code);
}

//===----------------------------------------------------------------------===//
// Constructing regions.
//===----------------------------------------------------------------------===//

// String literals live in the internal-globals space.
const StringRegion *MemRegionManager::getStringRegion(const StringLiteral *Str){
  return getSubRegion<StringRegion>(
      Str, cast<GlobalInternalSpaceRegion>(getGlobalsRegion()));
}

const ObjCStringRegion *
MemRegionManager::getObjCStringRegion(const ObjCStringLiteral *Str){
  return getSubRegion<ObjCStringRegion>(
      Str, cast<GlobalInternalSpaceRegion>(getGlobalsRegion()));
}

/// Look through a chain of LocationContexts to either find the
/// StackFrameContext that matches a DeclContext, or find a VarRegion
/// for a variable captured by a block.
static llvm::PointerUnion<const StackFrameContext *, const VarRegion *>
getStackOrCaptureRegionForDeclContext(const LocationContext *LC,
                                      const DeclContext *DC,
                                      const VarDecl *VD) {
  // Walk from the innermost location context outwards.
  while (LC) {
    if (const auto *SFC = dyn_cast<StackFrameContext>(LC)) {
      if (cast<DeclContext>(SFC->getDecl()) == DC)
        return SFC;
    }
    if (const auto *BC = dyn_cast<BlockInvocationContext>(LC)) {
      const auto *BR =
          static_cast<const BlockDataRegion *>(BC->getContextData());
      // FIXME: This can be made more efficient.
      for (BlockDataRegion::referenced_vars_iterator
             I = BR->referenced_vars_begin(),
             E = BR->referenced_vars_end(); I != E; ++I) {
        const VarRegion *VR = I.getOriginalRegion();
        if (VR->getDecl() == VD)
          return cast<VarRegion>(I.getCapturedRegion());
      }
    }

    LC = LC->getParent();
  }
  // No enclosing frame or capture found; return a null StackFrameContext.
  return (const StackFrameContext *)nullptr;
}

// Returns the region for variable 'D' as seen from location context 'LC',
// first classifying the variable into the appropriate memory space
// (system/immutable/internal globals, per-frame stack spaces, or per-code
// static-global spaces) and then uniquing a VarRegion inside it.
const VarRegion* MemRegionManager::getVarRegion(const VarDecl *D,
                                                const LocationContext *LC) {
  const MemRegion *sReg = nullptr;

  if (D->hasGlobalStorage() && !D->isStaticLocal()) {

    // First handle the globals defined in system headers.
    if (C.getSourceManager().isInSystemHeader(D->getLocation())) {
      // Whitelist the system globals which often DO GET modified, assume the
      // rest are immutable.
      if (D->getName().find("errno") != StringRef::npos)
        sReg = getGlobalsRegion(MemRegion::GlobalSystemSpaceRegionKind);
      else
        sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);

    // Treat other globals as GlobalInternal unless they are constants.
    } else {
      QualType GQT = D->getType();
      const Type *GT = GQT.getTypePtrOrNull();
      // TODO: We could walk the complex types here and see if everything is
      // constified.
      if (GT && GQT.isConstQualified() && GT->isArithmeticType())
        sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);
      else
        sReg = getGlobalsRegion();
    }

  // Finally handle static locals.
  } else {
    // FIXME: Once we implement scope handling, we will need to properly lookup
    // 'D' to the proper LocationContext.
    const DeclContext *DC = D->getDeclContext();
    llvm::PointerUnion<const StackFrameContext *, const VarRegion *> V =
      getStackOrCaptureRegionForDeclContext(LC, DC, D);

    // A block capture already has its region; return it directly.
    if (V.is<const VarRegion*>())
      return V.get<const VarRegion*>();

    const auto *STC = V.get<const StackFrameContext *>();

    if (!STC) {
      // FIXME: Assign a more sensible memory space to static locals
      // we see from within blocks that we analyze as top-level declarations.
      sReg = getUnknownRegion();
    } else {
      if (D->hasLocalStorage()) {
        // Parameters go in the arguments space, other locals in the
        // locals space of the frame found above.
        sReg = isa<ParmVarDecl>(D) || isa<ImplicitParamDecl>(D)
               ? static_cast<const MemRegion*>(getStackArgumentsRegion(STC))
               : static_cast<const MemRegion*>(getStackLocalsRegion(STC));
      }
      else {
        assert(D->isStaticLocal());
        const Decl *STCD = STC->getDecl();
        if (isa<FunctionDecl>(STCD) || isa<ObjCMethodDecl>(STCD))
          sReg = getGlobalsRegion(MemRegion::StaticGlobalSpaceRegionKind,
                                  getFunctionCodeRegion(cast<NamedDecl>(STCD)));
        else if (const auto *BD = dyn_cast<BlockDecl>(STCD)) {
          // FIXME: The fallback type here is totally bogus -- though it should
          // never be queried, it will prevent uniquing with the real
          // BlockCodeRegion. Ideally we'd fix the AST so that we always had a
          // signature.
          QualType T;
          if (const TypeSourceInfo *TSI = BD->getSignatureAsWritten())
            T = TSI->getType();
          if (T.isNull())
            T = getContext().VoidTy;
          if (!T->getAs<FunctionType>())
            T = getContext().getFunctionNoProtoType(T);
          T = getContext().getBlockPointerType(T);

          const BlockCodeRegion *BTR =
            getBlockCodeRegion(BD, C.getCanonicalType(T),
                               STC->getAnalysisDeclContext());
          sReg = getGlobalsRegion(MemRegion::StaticGlobalSpaceRegionKind,
                                  BTR);
        }
        else {
          sReg = getGlobalsRegion();
        }
      }
    }
  }

  return getSubRegion<VarRegion>(D, sReg);
}

// Overload for callers that already know the super region (e.g. captures).
const VarRegion *MemRegionManager::getVarRegion(const VarDecl *D,
                                                const MemRegion *superR) {
  return getSubRegion<VarRegion>(D, superR);
}

// Block-data regions (the runtime block object, as opposed to its code) live
// in the current frame's locals space, in the immutable-globals space for
// capture-less 'static' blocks, or in the unknown space when no location
// context is supplied.
const BlockDataRegion *
MemRegionManager::getBlockDataRegion(const BlockCodeRegion *BC,
                                     const LocationContext *LC,
                                     unsigned blockCount) {
  const MemSpaceRegion *sReg = nullptr;
  const BlockDecl *BD = BC->getDecl();
  if (!BD->hasCaptures()) {
    // This handles 'static' blocks.
    sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);
  }
  else {
    if (LC) {
      // FIXME: Once we implement scope handling, we want the parent region
      // to be the scope.
      const StackFrameContext *STC = LC->getCurrentStackFrame();
      assert(STC);
      sReg = getStackLocalsRegion(STC);
    }
    else {
      // We allow 'LC' to be NULL for cases where want BlockDataRegions
      // without context-sensitivity.
      sReg = getUnknownRegion();
    }
  }

  return getSubRegion<BlockDataRegion>(BC, LC, blockCount, sReg);
}

// Temporaries bound to static storage (e.g. lifetime-extended by a static
// reference) live in the internal-globals space.
const CXXTempObjectRegion *
MemRegionManager::getCXXStaticTempObjectRegion(const Expr *Ex) {
  return getSubRegion<CXXTempObjectRegion>(
      Ex, getGlobalsRegion(MemRegion::GlobalInternalSpaceRegionKind, nullptr));
}

const CompoundLiteralRegion*
MemRegionManager::getCompoundLiteralRegion(const CompoundLiteralExpr *CL,
                                           const LocationContext *LC) {
  const MemSpaceRegion *sReg = nullptr;

  // File-scope compound literals are globals; others live on the stack.
  if (CL->isFileScope())
    sReg = getGlobalsRegion();
  else {
    const StackFrameContext *STC = LC->getCurrentStackFrame();
    assert(STC);
    sReg = getStackLocalsRegion(STC);
  }

  return getSubRegion<CompoundLiteralRegion>(CL, sReg);
}

// Element regions are canonicalized on the unqualified canonical element
// type so that spelling differences do not create distinct regions.  This
// is hand-inlined rather than using getSubRegion because Idx is an SVal.
const ElementRegion*
MemRegionManager::getElementRegion(QualType elementType, NonLoc Idx,
                                   const SubRegion* superRegion,
                                   ASTContext &Ctx){
  QualType T = Ctx.getCanonicalType(elementType).getUnqualifiedType();

  llvm::FoldingSetNodeID ID;
  ElementRegion::ProfileRegion(ID, T, Idx, superRegion);

  void *InsertPos;
  MemRegion* data = Regions.FindNodeOrInsertPos(ID, InsertPos);
  auto *R = cast_or_null<ElementRegion>(data);

  if (!R) {
    R = A.Allocate<ElementRegion>();
    new (R) ElementRegion(T, Idx, superRegion);
    Regions.InsertNode(R, InsertPos);
  }

  return R;
}

const FunctionCodeRegion *
MemRegionManager::getFunctionCodeRegion(const NamedDecl *FD) {
  return getSubRegion<FunctionCodeRegion>(FD, getCodeRegion());
}

const BlockCodeRegion *
MemRegionManager::getBlockCodeRegion(const BlockDecl *BD, CanQualType locTy,
                                     AnalysisDeclContext *AC) {
  return getSubRegion<BlockCodeRegion>(BD, locTy, AC, getCodeRegion());
}

/// getSymbolicRegion - Retrieve or create a "symbolic" memory region.
const SymbolicRegion *MemRegionManager::getSymbolicRegion(SymbolRef sym) {
  return getSubRegion<SymbolicRegion>(sym, getUnknownRegion());
}

// Symbolic region known to point into the heap (e.g. a malloc result).
const SymbolicRegion *MemRegionManager::getSymbolicHeapRegion(SymbolRef Sym) {
  return getSubRegion<SymbolicRegion>(Sym, getHeapRegion());
}

const FieldRegion*
MemRegionManager::getFieldRegion(const FieldDecl *d,
                                 const SubRegion* superRegion){
  return getSubRegion<FieldRegion>(d, superRegion);
}

const ObjCIvarRegion*
MemRegionManager::getObjCIvarRegion(const ObjCIvarDecl *d,
                                    const SubRegion* superRegion) {
  return getSubRegion<ObjCIvarRegion>(d, superRegion);
}

// Ordinary (non-static) temporaries live in the current frame's locals space.
const CXXTempObjectRegion*
MemRegionManager::getCXXTempObjectRegion(Expr const *E,
                                         LocationContext const *LC) {
  const StackFrameContext *SFC = LC->getCurrentStackFrame();
  assert(SFC);
  return getSubRegion<CXXTempObjectRegion>(E, getStackLocalsRegion(SFC));
}

/// Checks whether \p BaseClass is a valid virtual or direct non-virtual base
/// class of the type of \p Super.
1023 static bool isValidBaseClass(const CXXRecordDecl *BaseClass, 1024 const TypedValueRegion *Super, 1025 bool IsVirtual) { 1026 BaseClass = BaseClass->getCanonicalDecl(); 1027 1028 const CXXRecordDecl *Class = Super->getValueType()->getAsCXXRecordDecl(); 1029 if (!Class) 1030 return true; 1031 1032 if (IsVirtual) 1033 return Class->isVirtuallyDerivedFrom(BaseClass); 1034 1035 for (const auto &I : Class->bases()) { 1036 if (I.getType()->getAsCXXRecordDecl()->getCanonicalDecl() == BaseClass) 1037 return true; 1038 } 1039 1040 return false; 1041 } 1042 1043 const CXXBaseObjectRegion * 1044 MemRegionManager::getCXXBaseObjectRegion(const CXXRecordDecl *RD, 1045 const SubRegion *Super, 1046 bool IsVirtual) { 1047 if (isa<TypedValueRegion>(Super)) { 1048 assert(isValidBaseClass(RD, dyn_cast<TypedValueRegion>(Super), IsVirtual)); 1049 (void)&isValidBaseClass; 1050 1051 if (IsVirtual) { 1052 // Virtual base regions should not be layered, since the layout rules 1053 // are different. 1054 while (const auto *Base = dyn_cast<CXXBaseObjectRegion>(Super)) 1055 Super = cast<SubRegion>(Base->getSuperRegion()); 1056 assert(Super && !isa<MemSpaceRegion>(Super)); 1057 } 1058 } 1059 1060 return getSubRegion<CXXBaseObjectRegion>(RD, IsVirtual, Super); 1061 } 1062 1063 const CXXThisRegion* 1064 MemRegionManager::getCXXThisRegion(QualType thisPointerTy, 1065 const LocationContext *LC) { 1066 const auto *PT = thisPointerTy->getAs<PointerType>(); 1067 assert(PT); 1068 // Inside the body of the operator() of a lambda a this expr might refer to an 1069 // object in one of the parent location contexts. 1070 const auto *D = dyn_cast<CXXMethodDecl>(LC->getDecl()); 1071 // FIXME: when operator() of lambda is analyzed as a top level function and 1072 // 'this' refers to a this to the enclosing scope, there is no right region to 1073 // return. 
1074 while (!LC->inTopFrame() && 1075 (!D || D->isStatic() || 1076 PT != D->getThisType(getContext())->getAs<PointerType>())) { 1077 LC = LC->getParent(); 1078 D = dyn_cast<CXXMethodDecl>(LC->getDecl()); 1079 } 1080 const StackFrameContext *STC = LC->getCurrentStackFrame(); 1081 assert(STC); 1082 return getSubRegion<CXXThisRegion>(PT, getStackArgumentsRegion(STC)); 1083 } 1084 1085 const AllocaRegion* 1086 MemRegionManager::getAllocaRegion(const Expr *E, unsigned cnt, 1087 const LocationContext *LC) { 1088 const StackFrameContext *STC = LC->getCurrentStackFrame(); 1089 assert(STC); 1090 return getSubRegion<AllocaRegion>(E, cnt, getStackLocalsRegion(STC)); 1091 } 1092 1093 const MemSpaceRegion *MemRegion::getMemorySpace() const { 1094 const MemRegion *R = this; 1095 const auto *SR = dyn_cast<SubRegion>(this); 1096 1097 while (SR) { 1098 R = SR->getSuperRegion(); 1099 SR = dyn_cast<SubRegion>(R); 1100 } 1101 1102 return dyn_cast<MemSpaceRegion>(R); 1103 } 1104 1105 bool MemRegion::hasStackStorage() const { 1106 return isa<StackSpaceRegion>(getMemorySpace()); 1107 } 1108 1109 bool MemRegion::hasStackNonParametersStorage() const { 1110 return isa<StackLocalsSpaceRegion>(getMemorySpace()); 1111 } 1112 1113 bool MemRegion::hasStackParametersStorage() const { 1114 return isa<StackArgumentsSpaceRegion>(getMemorySpace()); 1115 } 1116 1117 bool MemRegion::hasGlobalsOrParametersStorage() const { 1118 const MemSpaceRegion *MS = getMemorySpace(); 1119 return isa<StackArgumentsSpaceRegion>(MS) || 1120 isa<GlobalsSpaceRegion>(MS); 1121 } 1122 1123 // getBaseRegion strips away all elements and fields, and get the base region 1124 // of them. 
1125 const MemRegion *MemRegion::getBaseRegion() const { 1126 const MemRegion *R = this; 1127 while (true) { 1128 switch (R->getKind()) { 1129 case MemRegion::ElementRegionKind: 1130 case MemRegion::FieldRegionKind: 1131 case MemRegion::ObjCIvarRegionKind: 1132 case MemRegion::CXXBaseObjectRegionKind: 1133 R = cast<SubRegion>(R)->getSuperRegion(); 1134 continue; 1135 default: 1136 break; 1137 } 1138 break; 1139 } 1140 return R; 1141 } 1142 1143 bool MemRegion::isSubRegionOf(const MemRegion *R) const { 1144 return false; 1145 } 1146 1147 //===----------------------------------------------------------------------===// 1148 // View handling. 1149 //===----------------------------------------------------------------------===// 1150 1151 const MemRegion *MemRegion::StripCasts(bool StripBaseCasts) const { 1152 const MemRegion *R = this; 1153 while (true) { 1154 switch (R->getKind()) { 1155 case ElementRegionKind: { 1156 const auto *ER = cast<ElementRegion>(R); 1157 if (!ER->getIndex().isZeroConstant()) 1158 return R; 1159 R = ER->getSuperRegion(); 1160 break; 1161 } 1162 case CXXBaseObjectRegionKind: 1163 if (!StripBaseCasts) 1164 return R; 1165 R = cast<CXXBaseObjectRegion>(R)->getSuperRegion(); 1166 break; 1167 default: 1168 return R; 1169 } 1170 } 1171 } 1172 1173 const SymbolicRegion *MemRegion::getSymbolicBase() const { 1174 const auto *SubR = dyn_cast<SubRegion>(this); 1175 1176 while (SubR) { 1177 if (const auto *SymR = dyn_cast<SymbolicRegion>(SubR)) 1178 return SymR; 1179 SubR = dyn_cast<SubRegion>(SubR->getSuperRegion()); 1180 } 1181 return nullptr; 1182 } 1183 1184 /// Perform a given operation on two integers, return whether it overflows. 1185 /// Optionally write the resulting output into \p Res. 
1186 static bool checkedOp( 1187 int64_t LHS, 1188 int64_t RHS, 1189 std::function<llvm::APInt(llvm::APInt *, const llvm::APInt &, bool &)> Op, 1190 int64_t *Res = nullptr) { 1191 llvm::APInt ALHS(/*BitSize=*/64, LHS, /*Signed=*/true); 1192 llvm::APInt ARHS(/*BitSize=*/64, RHS, /*Signed=*/true); 1193 bool Overflow; 1194 llvm::APInt Out = Op(&ALHS, ARHS, Overflow); 1195 if (!Overflow && Res) 1196 *Res = Out.getSExtValue(); 1197 return Overflow; 1198 } 1199 1200 static bool checkedAdd( 1201 int64_t LHS, 1202 int64_t RHS, 1203 int64_t *Res=nullptr) { 1204 return checkedOp(LHS, RHS, &llvm::APInt::sadd_ov, Res); 1205 } 1206 1207 static bool checkedMul( 1208 int64_t LHS, 1209 int64_t RHS, 1210 int64_t *Res=nullptr) { 1211 return checkedOp(LHS, RHS, &llvm::APInt::smul_ov, Res); 1212 } 1213 1214 RegionRawOffset ElementRegion::getAsArrayOffset() const { 1215 CharUnits offset = CharUnits::Zero(); 1216 const ElementRegion *ER = this; 1217 const MemRegion *superR = nullptr; 1218 ASTContext &C = getContext(); 1219 1220 // FIXME: Handle multi-dimensional arrays. 1221 1222 while (ER) { 1223 superR = ER->getSuperRegion(); 1224 1225 // FIXME: generalize to symbolic offsets. 1226 SVal index = ER->getIndex(); 1227 if (Optional<nonloc::ConcreteInt> CI = index.getAs<nonloc::ConcreteInt>()) { 1228 // Update the offset. 1229 int64_t i = CI->getValue().getSExtValue(); 1230 1231 if (i != 0) { 1232 QualType elemType = ER->getElementType(); 1233 1234 // If we are pointing to an incomplete type, go no further. 
1235 if (elemType->isIncompleteType()) { 1236 superR = ER; 1237 break; 1238 } 1239 1240 CharUnits size = C.getTypeSizeInChars(elemType); 1241 1242 int64_t Mult; 1243 bool Overflow = checkedAdd(i, size.getQuantity(), &Mult); 1244 if (!Overflow) 1245 Overflow = checkedMul(Mult, offset.getQuantity()); 1246 if (Overflow) { 1247 DEBUG(llvm::dbgs() << "MemRegion::getAsArrayOffset: " 1248 << "offset overflowing, returning unknown\n"); 1249 1250 return nullptr; 1251 } 1252 1253 offset += (i * size); 1254 } 1255 1256 // Go to the next ElementRegion (if any). 1257 ER = dyn_cast<ElementRegion>(superR); 1258 continue; 1259 } 1260 1261 return nullptr; 1262 } 1263 1264 assert(superR && "super region cannot be NULL"); 1265 return RegionRawOffset(superR, offset); 1266 } 1267 1268 /// Returns true if \p Base is an immediate base class of \p Child 1269 static bool isImmediateBase(const CXXRecordDecl *Child, 1270 const CXXRecordDecl *Base) { 1271 assert(Child && "Child must not be null"); 1272 // Note that we do NOT canonicalize the base class here, because 1273 // ASTRecordLayout doesn't either. If that leads us down the wrong path, 1274 // so be it; at least we won't crash. 
1275 for (const auto &I : Child->bases()) { 1276 if (I.getType()->getAsCXXRecordDecl() == Base) 1277 return true; 1278 } 1279 1280 return false; 1281 } 1282 1283 static RegionOffset calculateOffset(const MemRegion *R) { 1284 const MemRegion *SymbolicOffsetBase = nullptr; 1285 int64_t Offset = 0; 1286 1287 while (true) { 1288 switch (R->getKind()) { 1289 case MemRegion::CodeSpaceRegionKind: 1290 case MemRegion::StackLocalsSpaceRegionKind: 1291 case MemRegion::StackArgumentsSpaceRegionKind: 1292 case MemRegion::HeapSpaceRegionKind: 1293 case MemRegion::UnknownSpaceRegionKind: 1294 case MemRegion::StaticGlobalSpaceRegionKind: 1295 case MemRegion::GlobalInternalSpaceRegionKind: 1296 case MemRegion::GlobalSystemSpaceRegionKind: 1297 case MemRegion::GlobalImmutableSpaceRegionKind: 1298 // Stores can bind directly to a region space to set a default value. 1299 assert(Offset == 0 && !SymbolicOffsetBase); 1300 goto Finish; 1301 1302 case MemRegion::FunctionCodeRegionKind: 1303 case MemRegion::BlockCodeRegionKind: 1304 case MemRegion::BlockDataRegionKind: 1305 // These will never have bindings, but may end up having values requested 1306 // if the user does some strange casting. 1307 if (Offset != 0) 1308 SymbolicOffsetBase = R; 1309 goto Finish; 1310 1311 case MemRegion::SymbolicRegionKind: 1312 case MemRegion::AllocaRegionKind: 1313 case MemRegion::CompoundLiteralRegionKind: 1314 case MemRegion::CXXThisRegionKind: 1315 case MemRegion::StringRegionKind: 1316 case MemRegion::ObjCStringRegionKind: 1317 case MemRegion::VarRegionKind: 1318 case MemRegion::CXXTempObjectRegionKind: 1319 // Usual base regions. 1320 goto Finish; 1321 1322 case MemRegion::ObjCIvarRegionKind: 1323 // This is a little strange, but it's a compromise between 1324 // ObjCIvarRegions having unknown compile-time offsets (when using the 1325 // non-fragile runtime) and yet still being distinct, non-overlapping 1326 // regions. 
Thus we treat them as "like" base regions for the purposes 1327 // of computing offsets. 1328 goto Finish; 1329 1330 case MemRegion::CXXBaseObjectRegionKind: { 1331 const auto *BOR = cast<CXXBaseObjectRegion>(R); 1332 R = BOR->getSuperRegion(); 1333 1334 QualType Ty; 1335 bool RootIsSymbolic = false; 1336 if (const auto *TVR = dyn_cast<TypedValueRegion>(R)) { 1337 Ty = TVR->getDesugaredValueType(R->getContext()); 1338 } else if (const auto *SR = dyn_cast<SymbolicRegion>(R)) { 1339 // If our base region is symbolic, we don't know what type it really is. 1340 // Pretend the type of the symbol is the true dynamic type. 1341 // (This will at least be self-consistent for the life of the symbol.) 1342 Ty = SR->getSymbol()->getType()->getPointeeType(); 1343 RootIsSymbolic = true; 1344 } 1345 1346 const CXXRecordDecl *Child = Ty->getAsCXXRecordDecl(); 1347 if (!Child) { 1348 // We cannot compute the offset of the base class. 1349 SymbolicOffsetBase = R; 1350 } else { 1351 if (RootIsSymbolic) { 1352 // Base layers on symbolic regions may not be type-correct. 1353 // Double-check the inheritance here, and revert to a symbolic offset 1354 // if it's invalid (e.g. due to a reinterpret_cast). 1355 if (BOR->isVirtual()) { 1356 if (!Child->isVirtuallyDerivedFrom(BOR->getDecl())) 1357 SymbolicOffsetBase = R; 1358 } else { 1359 if (!isImmediateBase(Child, BOR->getDecl())) 1360 SymbolicOffsetBase = R; 1361 } 1362 } 1363 } 1364 1365 // Don't bother calculating precise offsets if we already have a 1366 // symbolic offset somewhere in the chain. 1367 if (SymbolicOffsetBase) 1368 continue; 1369 1370 CharUnits BaseOffset; 1371 const ASTRecordLayout &Layout = R->getContext().getASTRecordLayout(Child); 1372 if (BOR->isVirtual()) 1373 BaseOffset = Layout.getVBaseClassOffset(BOR->getDecl()); 1374 else 1375 BaseOffset = Layout.getBaseClassOffset(BOR->getDecl()); 1376 1377 // The base offset is in chars, not in bits. 
1378 Offset += BaseOffset.getQuantity() * R->getContext().getCharWidth(); 1379 break; 1380 } 1381 case MemRegion::ElementRegionKind: { 1382 const auto *ER = cast<ElementRegion>(R); 1383 R = ER->getSuperRegion(); 1384 1385 QualType EleTy = ER->getValueType(); 1386 if (EleTy->isIncompleteType()) { 1387 // We cannot compute the offset of the base class. 1388 SymbolicOffsetBase = R; 1389 continue; 1390 } 1391 1392 SVal Index = ER->getIndex(); 1393 if (Optional<nonloc::ConcreteInt> CI = 1394 Index.getAs<nonloc::ConcreteInt>()) { 1395 // Don't bother calculating precise offsets if we already have a 1396 // symbolic offset somewhere in the chain. 1397 if (SymbolicOffsetBase) 1398 continue; 1399 1400 int64_t i = CI->getValue().getSExtValue(); 1401 // This type size is in bits. 1402 Offset += i * R->getContext().getTypeSize(EleTy); 1403 } else { 1404 // We cannot compute offset for non-concrete index. 1405 SymbolicOffsetBase = R; 1406 } 1407 break; 1408 } 1409 case MemRegion::FieldRegionKind: { 1410 const auto *FR = cast<FieldRegion>(R); 1411 R = FR->getSuperRegion(); 1412 1413 const RecordDecl *RD = FR->getDecl()->getParent(); 1414 if (RD->isUnion() || !RD->isCompleteDefinition()) { 1415 // We cannot compute offset for incomplete type. 1416 // For unions, we could treat everything as offset 0, but we'd rather 1417 // treat each field as a symbolic offset so they aren't stored on top 1418 // of each other, since we depend on things in typed regions actually 1419 // matching their types. 1420 SymbolicOffsetBase = R; 1421 } 1422 1423 // Don't bother calculating precise offsets if we already have a 1424 // symbolic offset somewhere in the chain. 1425 if (SymbolicOffsetBase) 1426 continue; 1427 1428 // Get the field number. 
1429 unsigned idx = 0; 1430 for (RecordDecl::field_iterator FI = RD->field_begin(), 1431 FE = RD->field_end(); FI != FE; ++FI, ++idx) { 1432 if (FR->getDecl() == *FI) 1433 break; 1434 } 1435 const ASTRecordLayout &Layout = R->getContext().getASTRecordLayout(RD); 1436 // This is offset in bits. 1437 Offset += Layout.getFieldOffset(idx); 1438 break; 1439 } 1440 } 1441 } 1442 1443 Finish: 1444 if (SymbolicOffsetBase) 1445 return RegionOffset(SymbolicOffsetBase, RegionOffset::Symbolic); 1446 return RegionOffset(R, Offset); 1447 } 1448 1449 RegionOffset MemRegion::getAsOffset() const { 1450 if (!cachedOffset) 1451 cachedOffset = calculateOffset(this); 1452 return *cachedOffset; 1453 } 1454 1455 //===----------------------------------------------------------------------===// 1456 // BlockDataRegion 1457 //===----------------------------------------------------------------------===// 1458 1459 std::pair<const VarRegion *, const VarRegion *> 1460 BlockDataRegion::getCaptureRegions(const VarDecl *VD) { 1461 MemRegionManager &MemMgr = *getMemRegionManager(); 1462 const VarRegion *VR = nullptr; 1463 const VarRegion *OriginalVR = nullptr; 1464 1465 if (!VD->hasAttr<BlocksAttr>() && VD->hasLocalStorage()) { 1466 VR = MemMgr.getVarRegion(VD, this); 1467 OriginalVR = MemMgr.getVarRegion(VD, LC); 1468 } 1469 else { 1470 if (LC) { 1471 VR = MemMgr.getVarRegion(VD, LC); 1472 OriginalVR = VR; 1473 } 1474 else { 1475 VR = MemMgr.getVarRegion(VD, MemMgr.getUnknownRegion()); 1476 OriginalVR = MemMgr.getVarRegion(VD, LC); 1477 } 1478 } 1479 return std::make_pair(VR, OriginalVR); 1480 } 1481 1482 void BlockDataRegion::LazyInitializeReferencedVars() { 1483 if (ReferencedVars) 1484 return; 1485 1486 AnalysisDeclContext *AC = getCodeRegion()->getAnalysisDeclContext(); 1487 const auto &ReferencedBlockVars = AC->getReferencedBlockVars(BC->getDecl()); 1488 auto NumBlockVars = 1489 std::distance(ReferencedBlockVars.begin(), ReferencedBlockVars.end()); 1490 1491 if (NumBlockVars == 0) { 1492 
ReferencedVars = (void*) 0x1; 1493 return; 1494 } 1495 1496 MemRegionManager &MemMgr = *getMemRegionManager(); 1497 llvm::BumpPtrAllocator &A = MemMgr.getAllocator(); 1498 BumpVectorContext BC(A); 1499 1500 using VarVec = BumpVector<const MemRegion *>; 1501 1502 auto *BV = A.Allocate<VarVec>(); 1503 new (BV) VarVec(BC, NumBlockVars); 1504 auto *BVOriginal = A.Allocate<VarVec>(); 1505 new (BVOriginal) VarVec(BC, NumBlockVars); 1506 1507 for (const auto *VD : ReferencedBlockVars) { 1508 const VarRegion *VR = nullptr; 1509 const VarRegion *OriginalVR = nullptr; 1510 std::tie(VR, OriginalVR) = getCaptureRegions(VD); 1511 assert(VR); 1512 assert(OriginalVR); 1513 BV->push_back(VR, BC); 1514 BVOriginal->push_back(OriginalVR, BC); 1515 } 1516 1517 ReferencedVars = BV; 1518 OriginalVars = BVOriginal; 1519 } 1520 1521 BlockDataRegion::referenced_vars_iterator 1522 BlockDataRegion::referenced_vars_begin() const { 1523 const_cast<BlockDataRegion*>(this)->LazyInitializeReferencedVars(); 1524 1525 auto *Vec = static_cast<BumpVector<const MemRegion *> *>(ReferencedVars); 1526 1527 if (Vec == (void*) 0x1) 1528 return BlockDataRegion::referenced_vars_iterator(nullptr, nullptr); 1529 1530 auto *VecOriginal = 1531 static_cast<BumpVector<const MemRegion *> *>(OriginalVars); 1532 1533 return BlockDataRegion::referenced_vars_iterator(Vec->begin(), 1534 VecOriginal->begin()); 1535 } 1536 1537 BlockDataRegion::referenced_vars_iterator 1538 BlockDataRegion::referenced_vars_end() const { 1539 const_cast<BlockDataRegion*>(this)->LazyInitializeReferencedVars(); 1540 1541 auto *Vec = static_cast<BumpVector<const MemRegion *> *>(ReferencedVars); 1542 1543 if (Vec == (void*) 0x1) 1544 return BlockDataRegion::referenced_vars_iterator(nullptr, nullptr); 1545 1546 auto *VecOriginal = 1547 static_cast<BumpVector<const MemRegion *> *>(OriginalVars); 1548 1549 return BlockDataRegion::referenced_vars_iterator(Vec->end(), 1550 VecOriginal->end()); 1551 } 1552 1553 const VarRegion 
*BlockDataRegion::getOriginalRegion(const VarRegion *R) const { 1554 for (referenced_vars_iterator I = referenced_vars_begin(), 1555 E = referenced_vars_end(); 1556 I != E; ++I) { 1557 if (I.getCapturedRegion() == R) 1558 return I.getOriginalRegion(); 1559 } 1560 return nullptr; 1561 } 1562 1563 //===----------------------------------------------------------------------===// 1564 // RegionAndSymbolInvalidationTraits 1565 //===----------------------------------------------------------------------===// 1566 1567 void RegionAndSymbolInvalidationTraits::setTrait(SymbolRef Sym, 1568 InvalidationKinds IK) { 1569 SymTraitsMap[Sym] |= IK; 1570 } 1571 1572 void RegionAndSymbolInvalidationTraits::setTrait(const MemRegion *MR, 1573 InvalidationKinds IK) { 1574 assert(MR); 1575 if (const auto *SR = dyn_cast<SymbolicRegion>(MR)) 1576 setTrait(SR->getSymbol(), IK); 1577 else 1578 MRTraitsMap[MR] |= IK; 1579 } 1580 1581 bool RegionAndSymbolInvalidationTraits::hasTrait(SymbolRef Sym, 1582 InvalidationKinds IK) const { 1583 const_symbol_iterator I = SymTraitsMap.find(Sym); 1584 if (I != SymTraitsMap.end()) 1585 return I->second & IK; 1586 1587 return false; 1588 } 1589 1590 bool RegionAndSymbolInvalidationTraits::hasTrait(const MemRegion *MR, 1591 InvalidationKinds IK) const { 1592 if (!MR) 1593 return false; 1594 1595 if (const auto *SR = dyn_cast<SymbolicRegion>(MR)) 1596 return hasTrait(SR->getSymbol(), IK); 1597 1598 const_region_iterator I = MRTraitsMap.find(MR); 1599 if (I != MRTraitsMap.end()) 1600 return I->second & IK; 1601 1602 return false; 1603 } 1604