1 //== MemRegion.cpp - Abstract memory regions for static analysis --*- C++ -*--// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file defines MemRegion and its subclasses. MemRegion defines a 11 // partially-typed abstraction of memory useful for path-sensitive dataflow 12 // analyses. 13 // 14 //===----------------------------------------------------------------------===// 15 16 #include "clang/StaticAnalyzer/Core/PathSensitive/MemRegion.h" 17 #include "clang/AST/Attr.h" 18 #include "clang/AST/CharUnits.h" 19 #include "clang/AST/DeclObjC.h" 20 #include "clang/AST/RecordLayout.h" 21 #include "clang/Analysis/AnalysisDeclContext.h" 22 #include "clang/Analysis/Support/BumpVector.h" 23 #include "clang/Basic/SourceManager.h" 24 #include "clang/StaticAnalyzer/Core/PathSensitive/SValBuilder.h" 25 #include "llvm/Support/raw_ostream.h" 26 27 using namespace clang; 28 using namespace ento; 29 30 //===----------------------------------------------------------------------===// 31 // MemRegion Construction. 
32 //===----------------------------------------------------------------------===// 33 34 template <typename RegionTy, typename SuperTy, typename Arg1Ty> 35 RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1, 36 const SuperTy *superRegion) { 37 llvm::FoldingSetNodeID ID; 38 RegionTy::ProfileRegion(ID, arg1, superRegion); 39 void *InsertPos; 40 RegionTy* R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, 41 InsertPos)); 42 43 if (!R) { 44 R = A.Allocate<RegionTy>(); 45 new (R) RegionTy(arg1, superRegion); 46 Regions.InsertNode(R, InsertPos); 47 } 48 49 return R; 50 } 51 52 template <typename RegionTy, typename SuperTy, typename Arg1Ty, typename Arg2Ty> 53 RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1, const Arg2Ty arg2, 54 const SuperTy *superRegion) { 55 llvm::FoldingSetNodeID ID; 56 RegionTy::ProfileRegion(ID, arg1, arg2, superRegion); 57 void *InsertPos; 58 RegionTy* R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, 59 InsertPos)); 60 61 if (!R) { 62 R = A.Allocate<RegionTy>(); 63 new (R) RegionTy(arg1, arg2, superRegion); 64 Regions.InsertNode(R, InsertPos); 65 } 66 67 return R; 68 } 69 70 template <typename RegionTy, typename SuperTy, 71 typename Arg1Ty, typename Arg2Ty, typename Arg3Ty> 72 RegionTy* MemRegionManager::getSubRegion(const Arg1Ty arg1, const Arg2Ty arg2, 73 const Arg3Ty arg3, 74 const SuperTy *superRegion) { 75 llvm::FoldingSetNodeID ID; 76 RegionTy::ProfileRegion(ID, arg1, arg2, arg3, superRegion); 77 void *InsertPos; 78 RegionTy* R = cast_or_null<RegionTy>(Regions.FindNodeOrInsertPos(ID, 79 InsertPos)); 80 81 if (!R) { 82 R = A.Allocate<RegionTy>(); 83 new (R) RegionTy(arg1, arg2, arg3, superRegion); 84 Regions.InsertNode(R, InsertPos); 85 } 86 87 return R; 88 } 89 90 //===----------------------------------------------------------------------===// 91 // Object destruction. 
92 //===----------------------------------------------------------------------===// 93 94 MemRegion::~MemRegion() {} 95 96 MemRegionManager::~MemRegionManager() { 97 // All regions and their data are BumpPtrAllocated. No need to call 98 // their destructors. 99 } 100 101 //===----------------------------------------------------------------------===// 102 // Basic methods. 103 //===----------------------------------------------------------------------===// 104 105 bool SubRegion::isSubRegionOf(const MemRegion* R) const { 106 const MemRegion* r = this; 107 do { 108 if (r == R) 109 return true; 110 if (const SubRegion* sr = dyn_cast<SubRegion>(r)) 111 r = sr->getSuperRegion(); 112 else 113 break; 114 } while (r != nullptr); 115 return false; 116 } 117 118 MemRegionManager* SubRegion::getMemRegionManager() const { 119 const SubRegion* r = this; 120 do { 121 const MemRegion *superRegion = r->getSuperRegion(); 122 if (const SubRegion *sr = dyn_cast<SubRegion>(superRegion)) { 123 r = sr; 124 continue; 125 } 126 return superRegion->getMemRegionManager(); 127 } while (1); 128 } 129 130 const StackFrameContext *VarRegion::getStackFrame() const { 131 const StackSpaceRegion *SSR = dyn_cast<StackSpaceRegion>(getMemorySpace()); 132 return SSR ? SSR->getStackFrame() : nullptr; 133 } 134 135 //===----------------------------------------------------------------------===// 136 // Region extents. 
137 //===----------------------------------------------------------------------===// 138 139 DefinedOrUnknownSVal TypedValueRegion::getExtent(SValBuilder &svalBuilder) const { 140 ASTContext &Ctx = svalBuilder.getContext(); 141 QualType T = getDesugaredValueType(Ctx); 142 143 if (isa<VariableArrayType>(T)) 144 return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this)); 145 if (T->isIncompleteType()) 146 return UnknownVal(); 147 148 CharUnits size = Ctx.getTypeSizeInChars(T); 149 QualType sizeTy = svalBuilder.getArrayIndexType(); 150 return svalBuilder.makeIntVal(size.getQuantity(), sizeTy); 151 } 152 153 DefinedOrUnknownSVal FieldRegion::getExtent(SValBuilder &svalBuilder) const { 154 // Force callers to deal with bitfields explicitly. 155 if (getDecl()->isBitField()) 156 return UnknownVal(); 157 158 DefinedOrUnknownSVal Extent = DeclRegion::getExtent(svalBuilder); 159 160 // A zero-length array at the end of a struct often stands for dynamically- 161 // allocated extra memory. 
162 if (Extent.isZeroConstant()) { 163 QualType T = getDesugaredValueType(svalBuilder.getContext()); 164 165 if (isa<ConstantArrayType>(T)) 166 return UnknownVal(); 167 } 168 169 return Extent; 170 } 171 172 DefinedOrUnknownSVal AllocaRegion::getExtent(SValBuilder &svalBuilder) const { 173 return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this)); 174 } 175 176 DefinedOrUnknownSVal SymbolicRegion::getExtent(SValBuilder &svalBuilder) const { 177 return nonloc::SymbolVal(svalBuilder.getSymbolManager().getExtentSymbol(this)); 178 } 179 180 DefinedOrUnknownSVal StringRegion::getExtent(SValBuilder &svalBuilder) const { 181 return svalBuilder.makeIntVal(getStringLiteral()->getByteLength()+1, 182 svalBuilder.getArrayIndexType()); 183 } 184 185 ObjCIvarRegion::ObjCIvarRegion(const ObjCIvarDecl *ivd, const SubRegion *sReg) 186 : DeclRegion(ivd, sReg, ObjCIvarRegionKind) {} 187 188 const ObjCIvarDecl *ObjCIvarRegion::getDecl() const { 189 return cast<ObjCIvarDecl>(D); 190 } 191 192 QualType ObjCIvarRegion::getValueType() const { 193 return getDecl()->getType(); 194 } 195 196 QualType CXXBaseObjectRegion::getValueType() const { 197 return QualType(getDecl()->getTypeForDecl(), 0); 198 } 199 200 //===----------------------------------------------------------------------===// 201 // FoldingSet profiling. 
//===----------------------------------------------------------------------===//

// NOTE: Every region kind pairs a static ProfileRegion(), which folds the
// identifying fields into a FoldingSetNodeID, with a virtual Profile() that
// forwards the member fields to it.  The two must stay in sync — the same
// fields, in the same order — because this profile is what makes region
// unique-ing in MemRegionManager::getSubRegion work.

void MemSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
}

void StackSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
  // Stack spaces are unique per stack frame.
  ID.AddPointer(getStackFrame());
}

void StaticGlobalSpaceRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ID.AddInteger(static_cast<unsigned>(getKind()));
  // One static-globals space per code region (function/block).
  ID.AddPointer(getCodeRegion());
}

void StringRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                 const StringLiteral* Str,
                                 const MemRegion* superRegion) {
  ID.AddInteger(static_cast<unsigned>(StringRegionKind));
  ID.AddPointer(Str);
  ID.AddPointer(superRegion);
}

void ObjCStringRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                     const ObjCStringLiteral* Str,
                                     const MemRegion* superRegion) {
  ID.AddInteger(static_cast<unsigned>(ObjCStringRegionKind));
  ID.AddPointer(Str);
  ID.AddPointer(superRegion);
}

void AllocaRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                 const Expr *Ex, unsigned cnt,
                                 const MemRegion *superRegion) {
  ID.AddInteger(static_cast<unsigned>(AllocaRegionKind));
  ID.AddPointer(Ex);
  // 'cnt' distinguishes separate allocations arising from the same alloca
  // expression; without it they would fold into one region.
  ID.AddInteger(cnt);
  ID.AddPointer(superRegion);
}

void AllocaRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  ProfileRegion(ID, Ex, Cnt, superRegion);
}

void CompoundLiteralRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  CompoundLiteralRegion::ProfileRegion(ID, CL, superRegion);
}

void CompoundLiteralRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                          const CompoundLiteralExpr *CL,
                                          const MemRegion* superRegion) {
  ID.AddInteger(static_cast<unsigned>(CompoundLiteralRegionKind));
  ID.AddPointer(CL);
  ID.AddPointer(superRegion);
}

void CXXThisRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                  const PointerType *PT,
                                  const MemRegion *sRegion) {
  ID.AddInteger(static_cast<unsigned>(CXXThisRegionKind));
  ID.AddPointer(PT);
  ID.AddPointer(sRegion);
}

void CXXThisRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  CXXThisRegion::ProfileRegion(ID, ThisPointerTy, superRegion);
}

void ObjCIvarRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                   const ObjCIvarDecl *ivd,
                                   const MemRegion* superRegion) {
  // Ivar regions profile exactly like any other DeclRegion, only with
  // their own kind tag.
  DeclRegion::ProfileRegion(ID, ivd, superRegion, ObjCIvarRegionKind);
}

void DeclRegion::ProfileRegion(llvm::FoldingSetNodeID& ID, const Decl *D,
                               const MemRegion* superRegion, Kind k) {
  ID.AddInteger(static_cast<unsigned>(k));
  ID.AddPointer(D);
  ID.AddPointer(superRegion);
}

void DeclRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  DeclRegion::ProfileRegion(ID, D, superRegion, getKind());
}

void VarRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  VarRegion::ProfileRegion(ID, getDecl(), superRegion);
}

void SymbolicRegion::ProfileRegion(llvm::FoldingSetNodeID& ID, SymbolRef sym,
                                   const MemRegion *sreg) {
  ID.AddInteger(static_cast<unsigned>(MemRegion::SymbolicRegionKind));
  ID.Add(sym);
  ID.AddPointer(sreg);
}

void SymbolicRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  SymbolicRegion::ProfileRegion(ID, sym, getSuperRegion());
}

void ElementRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                  QualType ElementType, SVal Idx,
                                  const MemRegion* superRegion) {
  ID.AddInteger(MemRegion::ElementRegionKind);
  ID.Add(ElementType);
  ID.AddPointer(superRegion);
  // The index may be symbolic, so profile the SVal rather than a pointer.
  Idx.Profile(ID);
}

void ElementRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  ElementRegion::ProfileRegion(ID, ElementType, Index, superRegion);
}

void FunctionCodeRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                       const NamedDecl *FD,
                                       const MemRegion*) {
  // The super region is deliberately ignored: the declaration alone
  // identifies the region.
  ID.AddInteger(MemRegion::FunctionCodeRegionKind);
  ID.AddPointer(FD);
}

void FunctionCodeRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  FunctionCodeRegion::ProfileRegion(ID, FD, superRegion);
}

void BlockCodeRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                    const BlockDecl *BD, CanQualType,
                                    const AnalysisDeclContext *AC,
                                    const MemRegion*) {
  // Only the BlockDecl participates in the profile; the type, context and
  // super region are deliberately ignored.
  ID.AddInteger(MemRegion::BlockCodeRegionKind);
  ID.AddPointer(BD);
}

void BlockCodeRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  BlockCodeRegion::ProfileRegion(ID, BD, locTy, AC, superRegion);
}

void BlockDataRegion::ProfileRegion(llvm::FoldingSetNodeID& ID,
                                    const BlockCodeRegion *BC,
                                    const LocationContext *LC,
                                    unsigned BlkCount,
                                    const MemRegion *sReg) {
  ID.AddInteger(MemRegion::BlockDataRegionKind);
  ID.AddPointer(BC);
  ID.AddPointer(LC);
  ID.AddInteger(BlkCount);
  ID.AddPointer(sReg);
}

void BlockDataRegion::Profile(llvm::FoldingSetNodeID& ID) const {
  BlockDataRegion::ProfileRegion(ID, BC, LC, BlockCount, getSuperRegion());
}

void CXXTempObjectRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                        Expr const *Ex,
                                        const MemRegion *sReg) {
  ID.AddPointer(Ex);
  ID.AddPointer(sReg);
}

void CXXTempObjectRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ProfileRegion(ID, Ex, getSuperRegion());
}

void CXXBaseObjectRegion::ProfileRegion(llvm::FoldingSetNodeID &ID,
                                        const CXXRecordDecl *RD,
                                        bool IsVirtual,
                                        const MemRegion *SReg) {
  ID.AddPointer(RD);
  ID.AddBoolean(IsVirtual);
  ID.AddPointer(SReg);
}

void CXXBaseObjectRegion::Profile(llvm::FoldingSetNodeID &ID) const {
  ProfileRegion(ID, getDecl(), isVirtual(), superRegion);
}

//===----------------------------------------------------------------------===//
// Region anchors.
//===----------------------------------------------------------------------===//

// Out-of-line virtual method definitions; they exist to pin each class's
// vtable to this translation unit.
void GlobalsSpaceRegion::anchor() { }
void NonStaticGlobalSpaceRegion::anchor() { }
void StackSpaceRegion::anchor() { }
void TypedRegion::anchor() { }
void TypedValueRegion::anchor() { }
void CodeTextRegion::anchor() { }
void SubRegion::anchor() { }

//===----------------------------------------------------------------------===//
// Region pretty-printing.
//===----------------------------------------------------------------------===//

LLVM_DUMP_METHOD void MemRegion::dump() const {
  dumpToStream(llvm::errs());
}

/// Render the region's dump form into a std::string.
std::string MemRegion::getString() const {
  std::string s;
  llvm::raw_string_ostream os(s);
  dumpToStream(os);
  return os.str();
}

// Fallback for region kinds without a dedicated printer.
void MemRegion::dumpToStream(raw_ostream &os) const {
  os << "<Unknown Region>";
}

void AllocaRegion::dumpToStream(raw_ostream &os) const {
  os << "alloca{" << static_cast<const void*>(Ex) << ',' << Cnt << '}';
}

void FunctionCodeRegion::dumpToStream(raw_ostream &os) const {
  os << "code{" << getDecl()->getDeclName().getAsString() << '}';
}

void BlockCodeRegion::dumpToStream(raw_ostream &os) const {
  os << "block_code{" << static_cast<const void*>(this) << '}';
}

void BlockDataRegion::dumpToStream(raw_ostream &os) const {
  // Print the underlying code region followed by each
  // (captured, original) variable-region pair.
  os << "block_data{" << BC;
  os << "; ";
  for (BlockDataRegion::referenced_vars_iterator
         I = referenced_vars_begin(),
         E = referenced_vars_end(); I != E; ++I)
    os << "(" << I.getCapturedRegion() << "," <<
                 I.getOriginalRegion() << ") ";
  os << '}';
}

void CompoundLiteralRegion::dumpToStream(raw_ostream &os) const {
  // FIXME: More elaborate pretty-printing.
  os << "{ " << static_cast<const void*>(CL) <<  " }";
}

void CXXTempObjectRegion::dumpToStream(raw_ostream &os) const {
  os << "temp_object{" << getValueType().getAsString() << ','
     << static_cast<const void*>(Ex) << '}';
}

void CXXBaseObjectRegion::dumpToStream(raw_ostream &os) const {
  os << "base{" << superRegion << ',' << getDecl()->getName() << '}';
}

void CXXThisRegion::dumpToStream(raw_ostream &os) const {
  os << "this";
}

void ElementRegion::dumpToStream(raw_ostream &os) const {
  os << "element{" << superRegion << ','
     << Index << ',' << getElementType().getAsString() << '}';
}

void FieldRegion::dumpToStream(raw_ostream &os) const {
  os << superRegion << "->" << *getDecl();
}

void ObjCIvarRegion::dumpToStream(raw_ostream &os) const {
  os << "ivar{" << superRegion << ',' << *getDecl() << '}';
}

void StringRegion::dumpToStream(raw_ostream &os) const {
  assert(Str != nullptr && "Expecting non-null StringLiteral");
  Str->printPretty(os, nullptr, PrintingPolicy(getContext().getLangOpts()));
}

void ObjCStringRegion::dumpToStream(raw_ostream &os) const {
  assert(Str != nullptr && "Expecting non-null ObjCStringLiteral");
  Str->printPretty(os, nullptr, PrintingPolicy(getContext().getLangOpts()));
}

void SymbolicRegion::dumpToStream(raw_ostream &os) const {
  // Heap-based symbolic regions get a "Heap" prefix for readability.
  if (isa<HeapSpaceRegion>(getSuperRegion()))
    os << "Heap";
  os << "SymRegion{" << sym << '}';
}

void VarRegion::dumpToStream(raw_ostream &os) const {
  os << *cast<VarDecl>(D);
}

LLVM_DUMP_METHOD void RegionRawOffset::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffset::dumpToStream(raw_ostream &os) const {
  os << "raw_offset{" << getRegion() << ',' << getOffset().getQuantity() << '}';
}

void CodeSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "CodeSpaceRegion";
}

void StaticGlobalSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StaticGlobalsMemSpace{" << CR << '}';
}

void GlobalInternalSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalInternalSpaceRegion";
}

void GlobalSystemSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalSystemSpaceRegion";
}

void GlobalImmutableSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "GlobalImmutableSpaceRegion";
}

void HeapSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "HeapSpaceRegion";
}

void UnknownSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "UnknownSpaceRegion";
}

void StackArgumentsSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StackArgumentsSpaceRegion";
}

void StackLocalsSpaceRegion::dumpToStream(raw_ostream &os) const {
  os << "StackLocalsSpaceRegion";
}

bool MemRegion::canPrintPretty() const {
  return canPrintPrettyAsExpr();
}

// By default regions cannot be rendered as a source-level expression;
// subclasses opt in by overriding this.
bool MemRegion::canPrintPrettyAsExpr() const {
  return false;
}

void MemRegion::printPretty(raw_ostream &os) const {
  assert(canPrintPretty() && "This region cannot be printed pretty.");
  os << "'";
  printPrettyAsExpr(os);
  os << "'";
}

void MemRegion::printPrettyAsExpr(raw_ostream &os) const {
  llvm_unreachable("This region cannot be printed pretty.");
}

bool VarRegion::canPrintPrettyAsExpr() const {
  return true;
}

void VarRegion::printPrettyAsExpr(raw_ostream &os) const {
  os << getDecl()->getName();
}

bool ObjCIvarRegion::canPrintPrettyAsExpr() const {
  return true;
}

void ObjCIvarRegion::printPrettyAsExpr(raw_ostream &os) const {
  os << getDecl()->getName();
}

bool FieldRegion::canPrintPretty() const {
  return true;
}

// A field is printable as an expression only if its base is.
bool FieldRegion::canPrintPrettyAsExpr() const {
  return superRegion->canPrintPrettyAsExpr();
}

void FieldRegion::printPrettyAsExpr(raw_ostream &os) const {
  assert(canPrintPrettyAsExpr());
  superRegion->printPrettyAsExpr(os);
  os << "." << getDecl()->getName();
}

void FieldRegion::printPretty(raw_ostream &os) const {
  if (canPrintPrettyAsExpr()) {
    os << "\'";
    printPrettyAsExpr(os);
    os << "'";
  } else {
    // Fall back to just the field name when the base is not printable.
    os << "field " << "\'" << getDecl()->getName() << "'";
  }
}

bool CXXBaseObjectRegion::canPrintPrettyAsExpr() const {
  return superRegion->canPrintPrettyAsExpr();
}

// Base-object adjustments are invisible at the source level, so simply
// print the underlying object.
void CXXBaseObjectRegion::printPrettyAsExpr(raw_ostream &os) const {
  superRegion->printPrettyAsExpr(os);
}

/// Build a human-readable name such as "arr[0][i]" for diagnostics.
/// Returns an empty string when no printable base region is found.
std::string MemRegion::getDescriptiveName(bool UseQuotes) const {
  std::string VariableName;
  std::string ArrayIndices;
  const MemRegion *R = this;
  SmallString<50> buf;
  llvm::raw_svector_ostream os(buf);

  // Obtain array indices to add them to the variable name.
  const ElementRegion *ER = nullptr;
  while ((ER = R->getAs<ElementRegion>())) {
    // Index is a ConcreteInt.
    if (auto CI = ER->getIndex().getAs<nonloc::ConcreteInt>()) {
      llvm::SmallString<2> Idx;
      CI->getValue().toString(Idx);
      // Prepend so inner indices end up rightmost: name[outer][inner].
      ArrayIndices = (llvm::Twine("[") + Idx.str() + "]" + ArrayIndices).str();
    }
    // If not a ConcreteInt, try to obtain the variable
    // name by calling 'getDescriptiveName' recursively.
    else {
      std::string Idx = ER->getDescriptiveName(false);
      if (!Idx.empty()) {
        ArrayIndices = (llvm::Twine("[") + Idx + "]" + ArrayIndices).str();
      }
    }
    R = ER->getSuperRegion();
  }

  // Get variable name.
  if (R && R->canPrintPrettyAsExpr()) {
    R->printPrettyAsExpr(os);
    if (UseQuotes) {
      return (llvm::Twine("'") + os.str() + ArrayIndices + "'").str();
    } else {
      return (llvm::Twine(os.str()) + ArrayIndices).str();
    }
  }

  // VariableName is never assigned; this returns the empty string.
  return VariableName;
}

/// Source range of the declaration backing this region, or an invalid
/// range when the region is not variable- or field-based.
SourceRange MemRegion::sourceRange() const {
  const VarRegion *const VR = dyn_cast<VarRegion>(this->getBaseRegion());
  const FieldRegion *const FR = dyn_cast<FieldRegion>(this);

  // Check for more specific regions first.
  // FieldRegion
  if (FR) {
    return FR->getDecl()->getSourceRange();
  }
  // VarRegion
  else if (VR) {
    return VR->getDecl()->getSourceRange();
  }
  // Return invalid source range (can be checked by client).
  else {
    return SourceRange{};
  }
}

//===----------------------------------------------------------------------===//
// MemRegionManager methods.
//===----------------------------------------------------------------------===//

// Allocate-on-first-use helpers for the singleton memory-space regions.
template <typename REG>
const REG *MemRegionManager::LazyAllocate(REG*& region) {
  if (!region) {
    region = A.Allocate<REG>();
    new (region) REG(this);
  }

  return region;
}

template <typename REG, typename ARG>
const REG *MemRegionManager::LazyAllocate(REG*& region, ARG a) {
  if (!region) {
    region = A.Allocate<REG>();
    new (region) REG(this, a);
  }

  return region;
}

// Stack spaces are unique per stack frame; cache them in a map keyed by
// the StackFrameContext.
const StackLocalsSpaceRegion*
MemRegionManager::getStackLocalsRegion(const StackFrameContext *STC) {
  assert(STC);
  StackLocalsSpaceRegion *&R = StackLocalsSpaceRegions[STC];

  if (R)
    return R;

  R = A.Allocate<StackLocalsSpaceRegion>();
  new (R) StackLocalsSpaceRegion(this, STC);
  return R;
}

const StackArgumentsSpaceRegion *
MemRegionManager::getStackArgumentsRegion(const StackFrameContext *STC) {
  assert(STC);
  StackArgumentsSpaceRegion *&R = StackArgumentsSpaceRegions[STC];

  if (R)
    return R;

  R = A.Allocate<StackArgumentsSpaceRegion>();
  new (R) StackArgumentsSpaceRegion(this, STC);
  return R;
}

/// Return the globals space of the requested kind.  A non-null \p CR selects
/// the per-code-region space that holds that function's static locals.
const GlobalsSpaceRegion
*MemRegionManager::getGlobalsRegion(MemRegion::Kind K,
                                    const CodeTextRegion *CR) {
  if (!CR) {
    if (K == MemRegion::GlobalSystemSpaceRegionKind)
      return LazyAllocate(SystemGlobals);
    if (K == MemRegion::GlobalImmutableSpaceRegionKind)
      return LazyAllocate(ImmutableGlobals);
    assert(K == MemRegion::GlobalInternalSpaceRegionKind);
    return LazyAllocate(InternalGlobals);
  }

  assert(K == MemRegion::StaticGlobalSpaceRegionKind);
  StaticGlobalSpaceRegion *&R = StaticsGlobalSpaceRegions[CR];
  if (R)
    return R;

  R = A.Allocate<StaticGlobalSpaceRegion>();
  new (R) StaticGlobalSpaceRegion(this, CR);
  return R;
}

const HeapSpaceRegion *MemRegionManager::getHeapRegion() {
  return LazyAllocate(heap);
}

const UnknownSpaceRegion *MemRegionManager::getUnknownRegion() {
  return LazyAllocate(unknown);
}

const CodeSpaceRegion *MemRegionManager::getCodeRegion() {
  return LazyAllocate(code);
}

//===----------------------------------------------------------------------===//
// Constructing regions.
//===----------------------------------------------------------------------===//

// String literals live in the internal-globals space.
const StringRegion* MemRegionManager::getStringRegion(const StringLiteral* Str){
  return getSubRegion<StringRegion>(
      Str, cast<GlobalInternalSpaceRegion>(getGlobalsRegion()));
}

const ObjCStringRegion *
MemRegionManager::getObjCStringRegion(const ObjCStringLiteral* Str){
  return getSubRegion<ObjCStringRegion>(
      Str, cast<GlobalInternalSpaceRegion>(getGlobalsRegion()));
}

/// Look through a chain of LocationContexts to either find the
/// StackFrameContext that matches a DeclContext, or find a VarRegion
/// for a variable captured by a block.
static llvm::PointerUnion<const StackFrameContext *, const VarRegion *>
getStackOrCaptureRegionForDeclContext(const LocationContext *LC,
                                      const DeclContext *DC,
                                      const VarDecl *VD) {
  while (LC) {
    // A stack frame whose declaration matches DC is the frame 'VD' lives in.
    if (const StackFrameContext *SFC = dyn_cast<StackFrameContext>(LC)) {
      if (cast<DeclContext>(SFC->getDecl()) == DC)
        return SFC;
    }
    // Inside a block invocation, 'VD' may instead be one of the block's
    // captures; if so, return the captured copy's region.
    if (const BlockInvocationContext *BC =
        dyn_cast<BlockInvocationContext>(LC)) {
      const BlockDataRegion *BR =
          static_cast<const BlockDataRegion*>(BC->getContextData());
      // FIXME: This can be made more efficient.
      for (BlockDataRegion::referenced_vars_iterator
           I = BR->referenced_vars_begin(),
           E = BR->referenced_vars_end(); I != E; ++I) {
        if (const VarRegion *VR = dyn_cast<VarRegion>(I.getOriginalRegion()))
          if (VR->getDecl() == VD)
            return cast<VarRegion>(I.getCapturedRegion());
      }
    }

    LC = LC->getParent();
  }
  // Nothing matched: a null StackFrameContext signals "not found".
  return (const StackFrameContext *)nullptr;
}

/// Retrieve or create the region for variable 'D' as seen from location
/// context 'LC', picking the memory space (globals, stack locals, stack
/// arguments, ...) that matches the variable's storage.
const VarRegion* MemRegionManager::getVarRegion(const VarDecl *D,
                                                const LocationContext *LC) {
  const MemRegion *sReg = nullptr;

  if (D->hasGlobalStorage() && !D->isStaticLocal()) {

    // First handle the globals defined in system headers.
    if (C.getSourceManager().isInSystemHeader(D->getLocation())) {
      // Whitelist the system globals which often DO GET modified, assume the
      // rest are immutable.
      if (D->getName().find("errno") != StringRef::npos)
        sReg = getGlobalsRegion(MemRegion::GlobalSystemSpaceRegionKind);
      else
        sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);

    // Treat other globals as GlobalInternal unless they are constants.
    } else {
      QualType GQT = D->getType();
      const Type *GT = GQT.getTypePtrOrNull();
      // TODO: We could walk the complex types here and see if everything is
      // constified.
      if (GT && GQT.isConstQualified() && GT->isArithmeticType())
        sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);
      else
        sReg = getGlobalsRegion();
    }

  // Finally handle static locals.
  } else {
    // FIXME: Once we implement scope handling, we will need to properly lookup
    // 'D' to the proper LocationContext.
    const DeclContext *DC = D->getDeclContext();
    llvm::PointerUnion<const StackFrameContext *, const VarRegion *> V =
      getStackOrCaptureRegionForDeclContext(LC, DC, D);

    // A block capture already has a region; return it directly.
    if (V.is<const VarRegion*>())
      return V.get<const VarRegion*>();

    const StackFrameContext *STC = V.get<const StackFrameContext*>();

    if (!STC) {
      // FIXME: Assign a more sensible memory space to static locals
      // we see from within blocks that we analyze as top-level declarations.
      sReg = getUnknownRegion();
    } else {
      if (D->hasLocalStorage()) {
        // Parameters (explicit or implicit) go into the arguments space;
        // everything else into the locals space of the frame.
        sReg = isa<ParmVarDecl>(D) || isa<ImplicitParamDecl>(D)
               ? static_cast<const MemRegion*>(getStackArgumentsRegion(STC))
               : static_cast<const MemRegion*>(getStackLocalsRegion(STC));
      }
      else {
        assert(D->isStaticLocal());
        const Decl *STCD = STC->getDecl();
        if (isa<FunctionDecl>(STCD) || isa<ObjCMethodDecl>(STCD))
          sReg = getGlobalsRegion(MemRegion::StaticGlobalSpaceRegionKind,
                                  getFunctionCodeRegion(cast<NamedDecl>(STCD)));
        else if (const BlockDecl *BD = dyn_cast<BlockDecl>(STCD)) {
          // FIXME: The fallback type here is totally bogus -- though it should
          // never be queried, it will prevent uniquing with the real
          // BlockCodeRegion. Ideally we'd fix the AST so that we always had a
          // signature.
          QualType T;
          if (const TypeSourceInfo *TSI = BD->getSignatureAsWritten())
            T = TSI->getType();
          if (T.isNull())
            T = getContext().VoidTy;
          if (!T->getAs<FunctionType>())
            T = getContext().getFunctionNoProtoType(T);
          T = getContext().getBlockPointerType(T);

          const BlockCodeRegion *BTR =
            getBlockCodeRegion(BD, C.getCanonicalType(T),
                               STC->getAnalysisDeclContext());
          sReg = getGlobalsRegion(MemRegion::StaticGlobalSpaceRegionKind,
                                  BTR);
        }
        else {
          sReg = getGlobalsRegion();
        }
      }
    }
  }

  return getSubRegion<VarRegion>(D, sReg);
}

/// Overload with an explicitly supplied super region.
const VarRegion *MemRegionManager::getVarRegion(const VarDecl *D,
                                                const MemRegion *superR) {
  return getSubRegion<VarRegion>(D, superR);
}

const BlockDataRegion *
MemRegionManager::getBlockDataRegion(const BlockCodeRegion *BC,
                                     const LocationContext *LC,
                                     unsigned blockCount) {
  const MemSpaceRegion *sReg = nullptr;
  const BlockDecl *BD = BC->getDecl();
  if (!BD->hasCaptures()) {
    // This handles 'static' blocks.
    sReg = getGlobalsRegion(MemRegion::GlobalImmutableSpaceRegionKind);
  }
  else {
    if (LC) {
      // FIXME: Once we implement scope handling, we want the parent region
      // to be the scope.
      const StackFrameContext *STC = LC->getCurrentStackFrame();
      assert(STC);
      sReg = getStackLocalsRegion(STC);
    }
    else {
      // We allow 'LC' to be NULL for cases where we want BlockDataRegions
      // without context-sensitivity.
      sReg = getUnknownRegion();
    }
  }

  return getSubRegion<BlockDataRegion>(BC, LC, blockCount, sReg);
}

// Temporaries bound to static storage live in the internal-globals space.
const CXXTempObjectRegion *
MemRegionManager::getCXXStaticTempObjectRegion(const Expr *Ex) {
  return getSubRegion<CXXTempObjectRegion>(
      Ex, getGlobalsRegion(MemRegion::GlobalInternalSpaceRegionKind, nullptr));
}

const CompoundLiteralRegion*
MemRegionManager::getCompoundLiteralRegion(const CompoundLiteralExpr *CL,
                                           const LocationContext *LC) {
  // File-scope compound literals are globals; otherwise they are locals of
  // the current stack frame.
  const MemSpaceRegion *sReg = nullptr;

  if (CL->isFileScope())
    sReg = getGlobalsRegion();
  else {
    const StackFrameContext *STC = LC->getCurrentStackFrame();
    assert(STC);
    sReg = getStackLocalsRegion(STC);
  }

  return getSubRegion<CompoundLiteralRegion>(CL, sReg);
}

const ElementRegion*
MemRegionManager::getElementRegion(QualType elementType, NonLoc Idx,
                                   const SubRegion* superRegion,
                                   ASTContext &Ctx){
  // Canonicalize the element type so equivalent spellings fold together.
  QualType T = Ctx.getCanonicalType(elementType).getUnqualifiedType();

  llvm::FoldingSetNodeID ID;
  ElementRegion::ProfileRegion(ID, T, Idx, superRegion);

  void *InsertPos;
  MemRegion* data = Regions.FindNodeOrInsertPos(ID, InsertPos);
  ElementRegion* R = cast_or_null<ElementRegion>(data);

  if (!R) {
    R = A.Allocate<ElementRegion>();
    new (R) ElementRegion(T, Idx, superRegion);
    Regions.InsertNode(R, InsertPos);
  }

  return R;
}

const FunctionCodeRegion *
MemRegionManager::getFunctionCodeRegion(const NamedDecl *FD) {
  return getSubRegion<FunctionCodeRegion>(FD, getCodeRegion());
}

const BlockCodeRegion *
MemRegionManager::getBlockCodeRegion(const BlockDecl *BD, CanQualType locTy,
                                     AnalysisDeclContext *AC) {
  return getSubRegion<BlockCodeRegion>(BD, locTy, AC, getCodeRegion());
}


/// getSymbolicRegion - Retrieve or create a "symbolic" memory region.
959 const SymbolicRegion *MemRegionManager::getSymbolicRegion(SymbolRef sym) { 960 return getSubRegion<SymbolicRegion>(sym, getUnknownRegion()); 961 } 962 963 const SymbolicRegion *MemRegionManager::getSymbolicHeapRegion(SymbolRef Sym) { 964 return getSubRegion<SymbolicRegion>(Sym, getHeapRegion()); 965 } 966 967 const FieldRegion* 968 MemRegionManager::getFieldRegion(const FieldDecl *d, 969 const SubRegion* superRegion){ 970 return getSubRegion<FieldRegion>(d, superRegion); 971 } 972 973 const ObjCIvarRegion* 974 MemRegionManager::getObjCIvarRegion(const ObjCIvarDecl *d, 975 const SubRegion* superRegion) { 976 return getSubRegion<ObjCIvarRegion>(d, superRegion); 977 } 978 979 const CXXTempObjectRegion* 980 MemRegionManager::getCXXTempObjectRegion(Expr const *E, 981 LocationContext const *LC) { 982 const StackFrameContext *SFC = LC->getCurrentStackFrame(); 983 assert(SFC); 984 return getSubRegion<CXXTempObjectRegion>(E, getStackLocalsRegion(SFC)); 985 } 986 987 /// Checks whether \p BaseClass is a valid virtual or direct non-virtual base 988 /// class of the type of \p Super. 
/// Sanity-check helper for getCXXBaseObjectRegion: returns true when the
/// claimed derivation is consistent with the static type of \p Super.
static bool isValidBaseClass(const CXXRecordDecl *BaseClass,
                             const TypedValueRegion *Super,
                             bool IsVirtual) {
  // Compare canonical decls so different redeclarations of the same class
  // compare equal.
  BaseClass = BaseClass->getCanonicalDecl();

  const CXXRecordDecl *Class = Super->getValueType()->getAsCXXRecordDecl();
  // If the super region's type is not a C++ class we cannot check the
  // derivation; accept conservatively.
  if (!Class)
    return true;

  if (IsVirtual)
    return Class->isVirtuallyDerivedFrom(BaseClass);

  // A non-virtual base must appear directly in the class's base list.
  for (const auto &I : Class->bases()) {
    if (I.getType()->getAsCXXRecordDecl()->getCanonicalDecl() == BaseClass)
      return true;
  }

  return false;
}

const CXXBaseObjectRegion *
MemRegionManager::getCXXBaseObjectRegion(const CXXRecordDecl *RD,
                                         const SubRegion *Super,
                                         bool IsVirtual) {
  if (isa<TypedValueRegion>(Super)) {
    assert(isValidBaseClass(RD, dyn_cast<TypedValueRegion>(Super), IsVirtual));
    // Keep isValidBaseClass referenced in NDEBUG builds, where the assert
    // above compiles away, to avoid an unused-function warning.
    (void)&isValidBaseClass;

    if (IsVirtual) {
      // Virtual base regions should not be layered, since the layout rules
      // are different.
      while (const CXXBaseObjectRegion *Base =
               dyn_cast<CXXBaseObjectRegion>(Super)) {
        Super = cast<SubRegion>(Base->getSuperRegion());
      }
      assert(Super && !isa<MemSpaceRegion>(Super));
    }
  }

  return getSubRegion<CXXBaseObjectRegion>(RD, IsVirtual, Super);
}

const CXXThisRegion*
MemRegionManager::getCXXThisRegion(QualType thisPointerTy,
                                   const LocationContext *LC) {
  const PointerType *PT = thisPointerTy->getAs<PointerType>();
  assert(PT);
  // Inside the body of the operator() of a lambda a this expr might refer to an
  // object in one of the parent location contexts.
  const auto *D = dyn_cast<CXXMethodDecl>(LC->getDecl());
  // FIXME: when operator() of lambda is analyzed as a top level function and
  // 'this' refers to a this to the enclosing scope, there is no right region to
  // return.
  // Walk outward through parent contexts until we find a non-static method
  // whose 'this' type matches the requested pointer type, or reach the top
  // frame.
  while (!LC->inTopFrame() &&
         (!D || D->isStatic() ||
          PT != D->getThisType(getContext())->getAs<PointerType>())) {
    LC = LC->getParent();
    D = dyn_cast<CXXMethodDecl>(LC->getDecl());
  }
  const StackFrameContext *STC = LC->getCurrentStackFrame();
  assert(STC);
  // 'this' is passed as an (implicit) argument, so it lives in the stack
  // arguments space of the chosen frame.
  return getSubRegion<CXXThisRegion>(PT, getStackArgumentsRegion(STC));
}

const AllocaRegion*
MemRegionManager::getAllocaRegion(const Expr *E, unsigned cnt,
                                  const LocationContext *LC) {
  // alloca() memory is scoped to the current stack frame; 'cnt' disambiguates
  // multiple allocations from the same expression.
  const StackFrameContext *STC = LC->getCurrentStackFrame();
  assert(STC);
  return getSubRegion<AllocaRegion>(E, cnt, getStackLocalsRegion(STC));
}

const MemSpaceRegion *MemRegion::getMemorySpace() const {
  // Follow the super-region chain to its root; the root is expected to be a
  // MemSpaceRegion (dyn_cast yields null otherwise).
  const MemRegion *R = this;
  const SubRegion* SR = dyn_cast<SubRegion>(this);

  while (SR) {
    R = SR->getSuperRegion();
    SR = dyn_cast<SubRegion>(R);
  }

  return dyn_cast<MemSpaceRegion>(R);
}

bool MemRegion::hasStackStorage() const {
  return isa<StackSpaceRegion>(getMemorySpace());
}

bool MemRegion::hasStackNonParametersStorage() const {
  return isa<StackLocalsSpaceRegion>(getMemorySpace());
}

bool MemRegion::hasStackParametersStorage() const {
  return isa<StackArgumentsSpaceRegion>(getMemorySpace());
}

bool MemRegion::hasGlobalsOrParametersStorage() const {
  const MemSpaceRegion *MS = getMemorySpace();
  return isa<StackArgumentsSpaceRegion>(MS) ||
         isa<GlobalsSpaceRegion>(MS);
}

// getBaseRegion strips away all elements and fields, and get the base region
// of them.
const MemRegion *MemRegion::getBaseRegion() const {
  const MemRegion *R = this;
  while (true) {
    switch (R->getKind()) {
    case MemRegion::ElementRegionKind:
    case MemRegion::FieldRegionKind:
    case MemRegion::ObjCIvarRegionKind:
    case MemRegion::CXXBaseObjectRegionKind:
      // Peel one element/field/ivar/base layer and keep walking up.
      R = cast<SubRegion>(R)->getSuperRegion();
      continue;
    default:
      break;
    }
    break;
  }
  return R;
}

bool MemRegion::isSubRegionOf(const MemRegion *R) const {
  // Base-class default: a plain MemRegion is contained in nothing.
  // NOTE(review): presumably SubRegion overrides this with the real
  // containment check -- confirm in MemRegion.h.
  return false;
}

//===----------------------------------------------------------------------===//
// View handling.
//===----------------------------------------------------------------------===//

const MemRegion *MemRegion::StripCasts(bool StripBaseCasts) const {
  const MemRegion *R = this;
  while (true) {
    switch (R->getKind()) {
    case ElementRegionKind: {
      // An element at index 0 is a reinterpretation of its super region, not
      // a distinct location; strip it. Non-zero indices are kept.
      const ElementRegion *ER = cast<ElementRegion>(R);
      if (!ER->getIndex().isZeroConstant())
        return R;
      R = ER->getSuperRegion();
      break;
    }
    case CXXBaseObjectRegionKind:
      if (!StripBaseCasts)
        return R;
      R = cast<CXXBaseObjectRegion>(R)->getSuperRegion();
      break;
    default:
      return R;
    }
  }
}

const SymbolicRegion *MemRegion::getSymbolicBase() const {
  // Walk up the super-region chain and return the first symbolic region
  // found, or null if the chain contains none.
  const SubRegion *SubR = dyn_cast<SubRegion>(this);

  while (SubR) {
    if (const SymbolicRegion *SymR = dyn_cast<SymbolicRegion>(SubR))
      return SymR;
    SubR = dyn_cast<SubRegion>(SubR->getSuperRegion());
  }
  return nullptr;
}

/// Fold a chain of ElementRegions with concrete indices into a single
/// (super region, byte offset) pair; returns a null region on any
/// non-concrete index.
RegionRawOffset ElementRegion::getAsArrayOffset() const {
  CharUnits offset = CharUnits::Zero();
  const ElementRegion *ER = this;
  const MemRegion *superR = nullptr;
  ASTContext &C = getContext();

  // FIXME: Handle multi-dimensional arrays.

  while (ER) {
    superR = ER->getSuperRegion();

    // FIXME: generalize to symbolic offsets.
    SVal index = ER->getIndex();
    if (Optional<nonloc::ConcreteInt> CI = index.getAs<nonloc::ConcreteInt>()) {
      // Update the offset.
      int64_t i = CI->getValue().getSExtValue();

      if (i != 0) {
        QualType elemType = ER->getElementType();

        // If we are pointing to an incomplete type, go no further.
        if (elemType->isIncompleteType()) {
          superR = ER;
          break;
        }

        CharUnits size = C.getTypeSizeInChars(elemType);
        offset += (i * size);
      }

      // Go to the next ElementRegion (if any).
      ER = dyn_cast<ElementRegion>(superR);
      continue;
    }

    // Symbolic index: the offset cannot be represented.
    return nullptr;
  }

  assert(superR && "super region cannot be NULL");
  return RegionRawOffset(superR, offset);
}


/// Returns true if \p Base is an immediate base class of \p Child
static bool isImmediateBase(const CXXRecordDecl *Child,
                            const CXXRecordDecl *Base) {
  assert(Child && "Child must not be null");
  // Note that we do NOT canonicalize the base class here, because
  // ASTRecordLayout doesn't either. If that leads us down the wrong path,
  // so be it; at least we won't crash.
  for (const auto &I : Child->bases()) {
    if (I.getType()->getAsCXXRecordDecl() == Base)
      return true;
  }

  return false;
}

/// Compute this region's offset from its base region, accumulated in bits,
/// or mark the offset as symbolic when it cannot be determined statically.
RegionOffset MemRegion::getAsOffset() const {
  const MemRegion *R = this;
  // First region in the chain whose offset is not statically known; once
  // set, precise accumulation stops.
  const MemRegion *SymbolicOffsetBase = nullptr;
  int64_t Offset = 0; // In bits.

  while (1) {
    switch (R->getKind()) {
    case CodeSpaceRegionKind:
    case StackLocalsSpaceRegionKind:
    case StackArgumentsSpaceRegionKind:
    case HeapSpaceRegionKind:
    case UnknownSpaceRegionKind:
    case StaticGlobalSpaceRegionKind:
    case GlobalInternalSpaceRegionKind:
    case GlobalSystemSpaceRegionKind:
    case GlobalImmutableSpaceRegionKind:
      // Stores can bind directly to a region space to set a default value.
      assert(Offset == 0 && !SymbolicOffsetBase);
      goto Finish;

    case FunctionCodeRegionKind:
    case BlockCodeRegionKind:
    case BlockDataRegionKind:
      // These will never have bindings, but may end up having values requested
      // if the user does some strange casting.
      if (Offset != 0)
        SymbolicOffsetBase = R;
      goto Finish;

    case SymbolicRegionKind:
    case AllocaRegionKind:
    case CompoundLiteralRegionKind:
    case CXXThisRegionKind:
    case StringRegionKind:
    case ObjCStringRegionKind:
    case VarRegionKind:
    case CXXTempObjectRegionKind:
      // Usual base regions.
      goto Finish;

    case ObjCIvarRegionKind:
      // This is a little strange, but it's a compromise between
      // ObjCIvarRegions having unknown compile-time offsets (when using the
      // non-fragile runtime) and yet still being distinct, non-overlapping
      // regions. Thus we treat them as "like" base regions for the purposes
      // of computing offsets.
      goto Finish;

    case CXXBaseObjectRegionKind: {
      const CXXBaseObjectRegion *BOR = cast<CXXBaseObjectRegion>(R);
      R = BOR->getSuperRegion();

      QualType Ty;
      bool RootIsSymbolic = false;
      if (const TypedValueRegion *TVR = dyn_cast<TypedValueRegion>(R)) {
        Ty = TVR->getDesugaredValueType(getContext());
      } else if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(R)) {
        // If our base region is symbolic, we don't know what type it really is.
        // Pretend the type of the symbol is the true dynamic type.
        // (This will at least be self-consistent for the life of the symbol.)
        Ty = SR->getSymbol()->getType()->getPointeeType();
        RootIsSymbolic = true;
      }
      // NOTE(review): if R is neither a TypedValueRegion nor a SymbolicRegion,
      // Ty stays null before the dereference below -- presumably that cannot
      // happen for the super region of a base-object region; confirm.

      const CXXRecordDecl *Child = Ty->getAsCXXRecordDecl();
      if (!Child) {
        // We cannot compute the offset of the base class.
        SymbolicOffsetBase = R;
      } else {
        if (RootIsSymbolic) {
          // Base layers on symbolic regions may not be type-correct.
          // Double-check the inheritance here, and revert to a symbolic offset
          // if it's invalid (e.g. due to a reinterpret_cast).
          if (BOR->isVirtual()) {
            if (!Child->isVirtuallyDerivedFrom(BOR->getDecl()))
              SymbolicOffsetBase = R;
          } else {
            if (!isImmediateBase(Child, BOR->getDecl()))
              SymbolicOffsetBase = R;
          }
        }
      }

      // Don't bother calculating precise offsets if we already have a
      // symbolic offset somewhere in the chain.
      if (SymbolicOffsetBase)
        continue;

      CharUnits BaseOffset;
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Child);
      if (BOR->isVirtual())
        BaseOffset = Layout.getVBaseClassOffset(BOR->getDecl());
      else
        BaseOffset = Layout.getBaseClassOffset(BOR->getDecl());

      // The base offset is in chars, not in bits.
      Offset += BaseOffset.getQuantity() * getContext().getCharWidth();
      break;
    }
    case ElementRegionKind: {
      const ElementRegion *ER = cast<ElementRegion>(R);
      R = ER->getSuperRegion();

      QualType EleTy = ER->getValueType();
      if (EleTy->isIncompleteType()) {
        // We cannot compute the offset of the base class.
        SymbolicOffsetBase = R;
        continue;
      }

      SVal Index = ER->getIndex();
      if (Optional<nonloc::ConcreteInt> CI =
            Index.getAs<nonloc::ConcreteInt>()) {
        // Don't bother calculating precise offsets if we already have a
        // symbolic offset somewhere in the chain.
        if (SymbolicOffsetBase)
          continue;

        int64_t i = CI->getValue().getSExtValue();
        // This type size is in bits.
        Offset += i * getContext().getTypeSize(EleTy);
      } else {
        // We cannot compute offset for non-concrete index.
        SymbolicOffsetBase = R;
      }
      break;
    }
    case FieldRegionKind: {
      const FieldRegion *FR = cast<FieldRegion>(R);
      R = FR->getSuperRegion();

      const RecordDecl *RD = FR->getDecl()->getParent();
      if (RD->isUnion() || !RD->isCompleteDefinition()) {
        // We cannot compute offset for incomplete type.
        // For unions, we could treat everything as offset 0, but we'd rather
        // treat each field as a symbolic offset so they aren't stored on top
        // of each other, since we depend on things in typed regions actually
        // matching their types.
        SymbolicOffsetBase = R;
      }

      // Don't bother calculating precise offsets if we already have a
      // symbolic offset somewhere in the chain.
      if (SymbolicOffsetBase)
        continue;

      // Get the field number.
      unsigned idx = 0;
      for (RecordDecl::field_iterator FI = RD->field_begin(),
             FE = RD->field_end(); FI != FE; ++FI, ++idx) {
        if (FR->getDecl() == *FI)
          break;
      }
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
      // This is offset in bits.
      Offset += Layout.getFieldOffset(idx);
      break;
    }
    }
  }

 Finish:
  if (SymbolicOffsetBase)
    return RegionOffset(SymbolicOffsetBase, RegionOffset::Symbolic);
  return RegionOffset(R, Offset);
}

//===----------------------------------------------------------------------===//
// BlockDataRegion
//===----------------------------------------------------------------------===//

/// Return the (captured region, original region) pair for a variable
/// referenced by this block.
std::pair<const VarRegion *, const VarRegion *>
BlockDataRegion::getCaptureRegions(const VarDecl *VD) {
  MemRegionManager &MemMgr = *getMemRegionManager();
  const VarRegion *VR = nullptr;
  const VarRegion *OriginalVR = nullptr;

  // A non-__block local is captured by copy: the captured region is parented
  // in the block itself, while the original region stays in its context.
  if (!VD->hasAttr<BlocksAttr>() && VD->hasLocalStorage()) {
    VR = MemMgr.getVarRegion(VD, this);
    OriginalVR = MemMgr.getVarRegion(VD, LC);
  }
  else {
    if (LC) {
      // Captured by reference (or non-local): both regions coincide.
      VR = MemMgr.getVarRegion(VD, LC);
      OriginalVR = VR;
    }
    else {
      VR = MemMgr.getVarRegion(VD, MemMgr.getUnknownRegion());
      OriginalVR = MemMgr.getVarRegion(VD, LC);
    }
  }
  return std::make_pair(VR, OriginalVR);
}

/// Populate ReferencedVars/OriginalVars on first use.
void BlockDataRegion::LazyInitializeReferencedVars() {
  // Already computed (possibly as the "empty" sentinel below).
  if (ReferencedVars)
    return;

  AnalysisDeclContext *AC = getCodeRegion()->getAnalysisDeclContext();
  const auto &ReferencedBlockVars = AC->getReferencedBlockVars(BC->getDecl());
  auto NumBlockVars =
      std::distance(ReferencedBlockVars.begin(), ReferencedBlockVars.end());

  if (NumBlockVars == 0) {
    // Non-null sentinel meaning "computed, no referenced variables".
    ReferencedVars = (void*) 0x1;
    return;
  }

  MemRegionManager &MemMgr = *getMemRegionManager();
  llvm::BumpPtrAllocator &A = MemMgr.getAllocator();
  // Note: this local shadows the 'BC' member used above; the member's last
  // use precedes this declaration, so the shadowing is benign.
  BumpVectorContext BC(A);

  typedef BumpVector<const MemRegion*> VarVec;
  VarVec *BV = A.Allocate<VarVec>();
  new (BV) VarVec(BC, NumBlockVars);
  VarVec *BVOriginal = A.Allocate<VarVec>();
  new (BVOriginal) VarVec(BC, NumBlockVars);

  for (const VarDecl *VD : ReferencedBlockVars) {
    const VarRegion *VR = nullptr;
    const VarRegion *OriginalVR = nullptr;
    std::tie(VR, OriginalVR) = getCaptureRegions(VD);
    assert(VR);
    assert(OriginalVR);
    BV->push_back(VR, BC);
    BVOriginal->push_back(OriginalVR, BC);
  }

  ReferencedVars = BV;
  OriginalVars = BVOriginal;
}

BlockDataRegion::referenced_vars_iterator
BlockDataRegion::referenced_vars_begin() const {
  const_cast<BlockDataRegion*>(this)->LazyInitializeReferencedVars();

  BumpVector<const MemRegion*> *Vec =
    static_cast<BumpVector<const MemRegion*>*>(ReferencedVars);

  // 0x1 is the "no referenced variables" sentinel set by
  // LazyInitializeReferencedVars.
  if (Vec == (void*) 0x1)
    return BlockDataRegion::referenced_vars_iterator(nullptr, nullptr);

  BumpVector<const MemRegion*> *VecOriginal =
    static_cast<BumpVector<const MemRegion*>*>(OriginalVars);

  return BlockDataRegion::referenced_vars_iterator(Vec->begin(),
                                                   VecOriginal->begin());
}

BlockDataRegion::referenced_vars_iterator
BlockDataRegion::referenced_vars_end() const {
  const_cast<BlockDataRegion*>(this)->LazyInitializeReferencedVars();

  BumpVector<const MemRegion*> *Vec =
    static_cast<BumpVector<const MemRegion*>*>(ReferencedVars);

  // See referenced_vars_begin for the 0x1 sentinel.
  if (Vec == (void*) 0x1)
    return BlockDataRegion::referenced_vars_iterator(nullptr, nullptr);

  BumpVector<const MemRegion*> *VecOriginal =
    static_cast<BumpVector<const MemRegion*>*>(OriginalVars);

  return BlockDataRegion::referenced_vars_iterator(Vec->end(),
                                                   VecOriginal->end());
}

/// Map a captured region back to the region of the original variable, or
/// return null if \p R is not captured by this block.
const VarRegion *BlockDataRegion::getOriginalRegion(const VarRegion *R) const {
  for (referenced_vars_iterator I = referenced_vars_begin(),
                                E = referenced_vars_end();
       I != E; ++I) {
    if (I.getCapturedRegion() == R)
      return I.getOriginalRegion();
  }
  return nullptr;
}

//===----------------------------------------------------------------------===//
1487 // RegionAndSymbolInvalidationTraits 1488 //===----------------------------------------------------------------------===// 1489 1490 void RegionAndSymbolInvalidationTraits::setTrait(SymbolRef Sym, 1491 InvalidationKinds IK) { 1492 SymTraitsMap[Sym] |= IK; 1493 } 1494 1495 void RegionAndSymbolInvalidationTraits::setTrait(const MemRegion *MR, 1496 InvalidationKinds IK) { 1497 assert(MR); 1498 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR)) 1499 setTrait(SR->getSymbol(), IK); 1500 else 1501 MRTraitsMap[MR] |= IK; 1502 } 1503 1504 bool RegionAndSymbolInvalidationTraits::hasTrait(SymbolRef Sym, 1505 InvalidationKinds IK) const { 1506 const_symbol_iterator I = SymTraitsMap.find(Sym); 1507 if (I != SymTraitsMap.end()) 1508 return I->second & IK; 1509 1510 return false; 1511 } 1512 1513 bool RegionAndSymbolInvalidationTraits::hasTrait(const MemRegion *MR, 1514 InvalidationKinds IK) const { 1515 if (!MR) 1516 return false; 1517 1518 if (const SymbolicRegion *SR = dyn_cast<SymbolicRegion>(MR)) 1519 return hasTrait(SR->getSymbol(), IK); 1520 1521 const_region_iterator I = MRTraitsMap.find(MR); 1522 if (I != MRTraitsMap.end()) 1523 return I->second & IK; 1524 1525 return false; 1526 } 1527