1 //===-- Value.cpp - Implement the Value class -----------------------------===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This file implements the Value, ValueHandle, and User classes. 10 // 11 //===----------------------------------------------------------------------===// 12 13 #include "llvm/IR/Value.h" 14 #include "LLVMContextImpl.h" 15 #include "llvm/ADT/DenseMap.h" 16 #include "llvm/ADT/SetVector.h" 17 #include "llvm/ADT/SmallString.h" 18 #include "llvm/IR/Constant.h" 19 #include "llvm/IR/Constants.h" 20 #include "llvm/IR/DataLayout.h" 21 #include "llvm/IR/DerivedTypes.h" 22 #include "llvm/IR/DerivedUser.h" 23 #include "llvm/IR/GetElementPtrTypeIterator.h" 24 #include "llvm/IR/InstrTypes.h" 25 #include "llvm/IR/Instructions.h" 26 #include "llvm/IR/IntrinsicInst.h" 27 #include "llvm/IR/Module.h" 28 #include "llvm/IR/Operator.h" 29 #include "llvm/IR/Statepoint.h" 30 #include "llvm/IR/ValueHandle.h" 31 #include "llvm/IR/ValueSymbolTable.h" 32 #include "llvm/Support/CommandLine.h" 33 #include "llvm/Support/Debug.h" 34 #include "llvm/Support/ErrorHandling.h" 35 #include "llvm/Support/ManagedStatic.h" 36 #include "llvm/Support/raw_ostream.h" 37 #include <algorithm> 38 39 using namespace llvm; 40 41 static cl::opt<unsigned> NonGlobalValueMaxNameSize( 42 "non-global-value-max-name-size", cl::Hidden, cl::init(1024), 43 cl::desc("Maximum size for the name of non-global values.")); 44 45 //===----------------------------------------------------------------------===// 46 // Value Class 47 //===----------------------------------------------------------------------===// 48 static inline Type *checkType(Type *Ty) { 49 assert(Ty && "Value defined with a null type: Error!"); 50 return Ty; 51 } 52 53 Value::Value(Type *ty, unsigned scid) 54 : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), 55 HasValueHandle(0), SubclassOptionalData(0), SubclassData(0), 56 NumUserOperands(0), IsUsedByMD(false), HasName(false) { 57 static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)"); 58 // FIXME: Why isn't this in the subclass gunk?? 59 // Note, we cannot call isa<CallInst> before the CallInst has been 60 // constructed. 61 if (SubclassID == Instruction::Call || SubclassID == Instruction::Invoke || 62 SubclassID == Instruction::CallBr) 63 assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) && 64 "invalid CallInst type!"); 65 else if (SubclassID != BasicBlockVal && 66 (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal)) 67 assert((VTy->isFirstClassType() || VTy->isVoidTy()) && 68 "Cannot create non-first-class values except for constants!"); 69 static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned), 70 "Value too big"); 71 } 72 73 Value::~Value() { 74 // Notify all ValueHandles (if present) that this value is going away. 75 if (HasValueHandle) 76 ValueHandleBase::ValueIsDeleted(this); 77 if (isUsedByMetadata()) 78 ValueAsMetadata::handleDeletion(this); 79 80 #ifndef NDEBUG // Only in -g mode... 81 // Check to make sure that there are no uses of this value that are still 82 // around when the value is destroyed. If there are, then we have a dangling 83 // reference and something is wrong. This code is here to print out where 84 // the value is still being referenced. 85 // 86 // Note that use_empty() cannot be called here, as it eventually downcasts 87 // 'this' to GlobalValue (derived class of Value), but GlobalValue has already 88 // been destructed, so accessing it is UB. 89 // 90 if (!materialized_use_empty()) { 91 dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n"; 92 for (auto *U : users()) 93 dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n"; 94 } 95 #endif 96 assert(materialized_use_empty() && "Uses remain when a value is destroyed!"); 97 98 // If this value is named, destroy the name. This should not be in a symtab 99 // at this point. 100 destroyValueName(); 101 } 102 103 void Value::deleteValue() { 104 switch (getValueID()) { 105 #define HANDLE_VALUE(Name) \ 106 case Value::Name##Val: \ 107 delete static_cast<Name *>(this); \ 108 break; 109 #define HANDLE_MEMORY_VALUE(Name) \ 110 case Value::Name##Val: \ 111 static_cast<DerivedUser *>(this)->DeleteValue( \ 112 static_cast<DerivedUser *>(this)); \ 113 break; 114 #define HANDLE_INSTRUCTION(Name) /* nothing */ 115 #include "llvm/IR/Value.def" 116 117 #define HANDLE_INST(N, OPC, CLASS) \ 118 case Value::InstructionVal + Instruction::OPC: \ 119 delete static_cast<CLASS *>(this); \ 120 break; 121 #define HANDLE_USER_INST(N, OPC, CLASS) 122 #include "llvm/IR/Instruction.def" 123 124 default: 125 llvm_unreachable("attempting to delete unknown value kind"); 126 } 127 } 128 129 void Value::destroyValueName() { 130 ValueName *Name = getValueName(); 131 if (Name) { 132 MallocAllocator Allocator; 133 Name->Destroy(Allocator); 134 } 135 setValueName(nullptr); 136 } 137 138 bool Value::hasNUses(unsigned N) const { 139 return hasNItems(use_begin(), use_end(), N); 140 } 141 142 bool Value::hasNUsesOrMore(unsigned N) const { 143 return hasNItemsOrMore(use_begin(), use_end(), N); 144 } 145 146 static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); } 147 148 Use *Value::getSingleUndroppableUse() { 149 Use *Result = nullptr; 150 for (Use &U : uses()) { 151 if (!U.getUser()->isDroppable()) { 152 if (Result) 153 return nullptr; 154 Result = &U; 155 } 156 } 157 return Result; 158 } 159 160 bool Value::hasNUndroppableUses(unsigned int N) const { 161 return hasNItems(user_begin(), user_end(), N, isUnDroppableUser); 162 } 163 164 bool Value::hasNUndroppableUsesOrMore(unsigned int N) const { 165 return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser); 166 } 167 168 void Value::dropDroppableUses( 169 llvm::function_ref<bool(const Use *)> ShouldDrop) { 170 SmallVector<Use *, 8> ToBeEdited; 171 for (Use &U : uses()) 172 if (U.getUser()->isDroppable() && ShouldDrop(&U)) 173 ToBeEdited.push_back(&U); 174 for (Use *U : ToBeEdited) { 175 U->removeFromList(); 176 if (auto *Assume = dyn_cast<IntrinsicInst>(U->getUser())) { 177 assert(Assume->getIntrinsicID() == Intrinsic::assume); 178 unsigned OpNo = U->getOperandNo(); 179 if (OpNo == 0) 180 Assume->setOperand(0, ConstantInt::getTrue(Assume->getContext())); 181 else { 182 Assume->setOperand(OpNo, UndefValue::get(U->get()->getType())); 183 CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo); 184 BOI.Tag = getContext().pImpl->getOrInsertBundleTag("ignore"); 185 } 186 } else 187 llvm_unreachable("unkown droppable use"); 188 } 189 } 190 191 bool Value::isUsedInBasicBlock(const BasicBlock *BB) const { 192 // This can be computed either by scanning the instructions in BB, or by 193 // scanning the use list of this Value. Both lists can be very long, but 194 // usually one is quite short. 195 // 196 // Scan both lists simultaneously until one is exhausted. This limits the 197 // search to the shorter list. 198 BasicBlock::const_iterator BI = BB->begin(), BE = BB->end(); 199 const_user_iterator UI = user_begin(), UE = user_end(); 200 for (; BI != BE && UI != UE; ++BI, ++UI) { 201 // Scan basic block: Check if this Value is used by the instruction at BI. 202 if (is_contained(BI->operands(), this)) 203 return true; 204 // Scan use list: Check if the use at UI is in BB. 205 const auto *User = dyn_cast<Instruction>(*UI); 206 if (User && User->getParent() == BB) 207 return true; 208 } 209 return false; 210 } 211 212 unsigned Value::getNumUses() const { 213 return (unsigned)std::distance(use_begin(), use_end()); 214 } 215 216 static bool getSymTab(Value *V, ValueSymbolTable *&ST) { 217 ST = nullptr; 218 if (Instruction *I = dyn_cast<Instruction>(V)) { 219 if (BasicBlock *P = I->getParent()) 220 if (Function *PP = P->getParent()) 221 ST = PP->getValueSymbolTable(); 222 } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) { 223 if (Function *P = BB->getParent()) 224 ST = P->getValueSymbolTable(); 225 } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) { 226 if (Module *P = GV->getParent()) 227 ST = &P->getValueSymbolTable(); 228 } else if (Argument *A = dyn_cast<Argument>(V)) { 229 if (Function *P = A->getParent()) 230 ST = P->getValueSymbolTable(); 231 } else { 232 assert(isa<Constant>(V) && "Unknown value type!"); 233 return true; // no name is setable for this. 234 } 235 return false; 236 } 237 238 ValueName *Value::getValueName() const { 239 if (!HasName) return nullptr; 240 241 LLVMContext &Ctx = getContext(); 242 auto I = Ctx.pImpl->ValueNames.find(this); 243 assert(I != Ctx.pImpl->ValueNames.end() && 244 "No name entry found!"); 245 246 return I->second; 247 } 248 249 void Value::setValueName(ValueName *VN) { 250 LLVMContext &Ctx = getContext(); 251 252 assert(HasName == Ctx.pImpl->ValueNames.count(this) && 253 "HasName bit out of sync!"); 254 255 if (!VN) { 256 if (HasName) 257 Ctx.pImpl->ValueNames.erase(this); 258 HasName = false; 259 return; 260 } 261 262 HasName = true; 263 Ctx.pImpl->ValueNames[this] = VN; 264 } 265 266 StringRef Value::getName() const { 267 // Make sure the empty string is still a C string. For historical reasons, 268 // some clients want to call .data() on the result and expect it to be null 269 // terminated. 270 if (!hasName()) 271 return StringRef("", 0); 272 return getValueName()->getKey(); 273 } 274 275 void Value::setNameImpl(const Twine &NewName) { 276 // Fast-path: LLVMContext can be set to strip out non-GlobalValue names 277 if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this)) 278 return; 279 280 // Fast path for common IRBuilder case of setName("") when there is no name. 281 if (NewName.isTriviallyEmpty() && !hasName()) 282 return; 283 284 SmallString<256> NameData; 285 StringRef NameRef = NewName.toStringRef(NameData); 286 assert(NameRef.find_first_of(0) == StringRef::npos && 287 "Null bytes are not allowed in names"); 288 289 // Name isn't changing? 290 if (getName() == NameRef) 291 return; 292 293 // Cap the size of non-GlobalValue names. 294 if (NameRef.size() > NonGlobalValueMaxNameSize && !isa<GlobalValue>(this)) 295 NameRef = 296 NameRef.substr(0, std::max(1u, (unsigned)NonGlobalValueMaxNameSize)); 297 298 assert(!getType()->isVoidTy() && "Cannot assign a name to void values!"); 299 300 // Get the symbol table to update for this object. 301 ValueSymbolTable *ST; 302 if (getSymTab(this, ST)) 303 return; // Cannot set a name on this value (e.g. constant). 304 305 if (!ST) { // No symbol table to update? Just do the change. 306 if (NameRef.empty()) { 307 // Free the name for this value. 308 destroyValueName(); 309 return; 310 } 311 312 // NOTE: Could optimize for the case the name is shrinking to not deallocate 313 // then reallocated. 314 destroyValueName(); 315 316 // Create the new name. 317 MallocAllocator Allocator; 318 setValueName(ValueName::Create(NameRef, Allocator)); 319 getValueName()->setValue(this); 320 return; 321 } 322 323 // NOTE: Could optimize for the case the name is shrinking to not deallocate 324 // then reallocated. 325 if (hasName()) { 326 // Remove old name. 327 ST->removeValueName(getValueName()); 328 destroyValueName(); 329 330 if (NameRef.empty()) 331 return; 332 } 333 334 // Name is changing to something new. 335 setValueName(ST->createValueName(NameRef, this)); 336 } 337 338 void Value::setName(const Twine &NewName) { 339 setNameImpl(NewName); 340 if (Function *F = dyn_cast<Function>(this)) 341 F->recalculateIntrinsicID(); 342 } 343 344 void Value::takeName(Value *V) { 345 ValueSymbolTable *ST = nullptr; 346 // If this value has a name, drop it. 347 if (hasName()) { 348 // Get the symtab this is in. 349 if (getSymTab(this, ST)) { 350 // We can't set a name on this value, but we need to clear V's name if 351 // it has one. 352 if (V->hasName()) V->setName(""); 353 return; // Cannot set a name on this value (e.g. constant). 354 } 355 356 // Remove old name. 357 if (ST) 358 ST->removeValueName(getValueName()); 359 destroyValueName(); 360 } 361 362 // Now we know that this has no name. 363 364 // If V has no name either, we're done. 365 if (!V->hasName()) return; 366 367 // Get this's symtab if we didn't before. 368 if (!ST) { 369 if (getSymTab(this, ST)) { 370 // Clear V's name. 371 V->setName(""); 372 return; // Cannot set a name on this value (e.g. constant). 373 } 374 } 375 376 // Get V's ST, this should always succed, because V has a name. 377 ValueSymbolTable *VST; 378 bool Failure = getSymTab(V, VST); 379 assert(!Failure && "V has a name, so it should have a ST!"); (void)Failure; 380 381 // If these values are both in the same symtab, we can do this very fast. 382 // This works even if both values have no symtab yet. 383 if (ST == VST) { 384 // Take the name! 385 setValueName(V->getValueName()); 386 V->setValueName(nullptr); 387 getValueName()->setValue(this); 388 return; 389 } 390 391 // Otherwise, things are slightly more complex. Remove V's name from VST and 392 // then reinsert it into ST. 393 394 if (VST) 395 VST->removeValueName(V->getValueName()); 396 setValueName(V->getValueName()); 397 V->setValueName(nullptr); 398 getValueName()->setValue(this); 399 400 if (ST) 401 ST->reinsertValue(this); 402 } 403 404 void Value::assertModuleIsMaterializedImpl() const { 405 #ifndef NDEBUG 406 const GlobalValue *GV = dyn_cast<GlobalValue>(this); 407 if (!GV) 408 return; 409 const Module *M = GV->getParent(); 410 if (!M) 411 return; 412 assert(M->isMaterialized()); 413 #endif 414 } 415 416 #ifndef NDEBUG 417 static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr, 418 Constant *C) { 419 if (!Cache.insert(Expr).second) 420 return false; 421 422 for (auto &O : Expr->operands()) { 423 if (O == C) 424 return true; 425 auto *CE = dyn_cast<ConstantExpr>(O); 426 if (!CE) 427 continue; 428 if (contains(Cache, CE, C)) 429 return true; 430 } 431 return false; 432 } 433 434 static bool contains(Value *Expr, Value *V) { 435 if (Expr == V) 436 return true; 437 438 auto *C = dyn_cast<Constant>(V); 439 if (!C) 440 return false; 441 442 auto *CE = dyn_cast<ConstantExpr>(Expr); 443 if (!CE) 444 return false; 445 446 SmallPtrSet<ConstantExpr *, 4> Cache; 447 return contains(Cache, CE, C); 448 } 449 #endif // NDEBUG 450 451 void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) { 452 assert(New && "Value::replaceAllUsesWith(<null>) is invalid!"); 453 assert(!contains(New, this) && 454 "this->replaceAllUsesWith(expr(this)) is NOT valid!"); 455 assert(New->getType() == getType() && 456 "replaceAllUses of value with new value of different type!"); 457 458 // Notify all ValueHandles (if present) that this value is going away. 459 if (HasValueHandle) 460 ValueHandleBase::ValueIsRAUWd(this, New); 461 if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata()) 462 ValueAsMetadata::handleRAUW(this, New); 463 464 while (!materialized_use_empty()) { 465 Use &U = *UseList; 466 // Must handle Constants specially, we cannot call replaceUsesOfWith on a 467 // constant because they are uniqued. 468 if (auto *C = dyn_cast<Constant>(U.getUser())) { 469 if (!isa<GlobalValue>(C)) { 470 C->handleOperandChange(this, New); 471 continue; 472 } 473 } 474 475 U.set(New); 476 } 477 478 if (BasicBlock *BB = dyn_cast<BasicBlock>(this)) 479 BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New)); 480 } 481 482 void Value::replaceAllUsesWith(Value *New) { 483 doRAUW(New, ReplaceMetadataUses::Yes); 484 } 485 486 void Value::replaceNonMetadataUsesWith(Value *New) { 487 doRAUW(New, ReplaceMetadataUses::No); 488 } 489 490 // Like replaceAllUsesWith except it does not handle constants or basic blocks. 491 // This routine leaves uses within BB. 492 void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) { 493 assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!"); 494 assert(!contains(New, this) && 495 "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!"); 496 assert(New->getType() == getType() && 497 "replaceUses of value with new value of different type!"); 498 assert(BB && "Basic block that may contain a use of 'New' must be defined\n"); 499 500 replaceUsesWithIf(New, [BB](Use &U) { 501 auto *I = dyn_cast<Instruction>(U.getUser()); 502 // Don't replace if it's an instruction in the BB basic block. 503 return !I || I->getParent() != BB; 504 }); 505 } 506 507 namespace { 508 // Various metrics for how much to strip off of pointers. 509 enum PointerStripKind { 510 PSK_ZeroIndices, 511 PSK_ZeroIndicesAndAliases, 512 PSK_ZeroIndicesSameRepresentation, 513 PSK_ZeroIndicesAndInvariantGroups, 514 PSK_InBoundsConstantIndices, 515 PSK_InBounds 516 }; 517 518 template <PointerStripKind StripKind> static void NoopCallback(const Value *) {} 519 520 template <PointerStripKind StripKind> 521 static const Value *stripPointerCastsAndOffsets( 522 const Value *V, 523 function_ref<void(const Value *)> Func = NoopCallback<StripKind>) { 524 if (!V->getType()->isPointerTy()) 525 return V; 526 527 // Even though we don't look through PHI nodes, we could be called on an 528 // instruction in an unreachable block, which may be on a cycle. 529 SmallPtrSet<const Value *, 4> Visited; 530 531 Visited.insert(V); 532 do { 533 Func(V); 534 if (auto *GEP = dyn_cast<GEPOperator>(V)) { 535 switch (StripKind) { 536 case PSK_ZeroIndices: 537 case PSK_ZeroIndicesAndAliases: 538 case PSK_ZeroIndicesSameRepresentation: 539 case PSK_ZeroIndicesAndInvariantGroups: 540 if (!GEP->hasAllZeroIndices()) 541 return V; 542 break; 543 case PSK_InBoundsConstantIndices: 544 if (!GEP->hasAllConstantIndices()) 545 return V; 546 LLVM_FALLTHROUGH; 547 case PSK_InBounds: 548 if (!GEP->isInBounds()) 549 return V; 550 break; 551 } 552 V = GEP->getPointerOperand(); 553 } else if (Operator::getOpcode(V) == Instruction::BitCast) { 554 V = cast<Operator>(V)->getOperand(0); 555 } else if (StripKind != PSK_ZeroIndicesSameRepresentation && 556 Operator::getOpcode(V) == Instruction::AddrSpaceCast) { 557 // TODO: If we know an address space cast will not change the 558 // representation we could look through it here as well. 559 V = cast<Operator>(V)->getOperand(0); 560 } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) { 561 V = cast<GlobalAlias>(V)->getAliasee(); 562 } else { 563 if (const auto *Call = dyn_cast<CallBase>(V)) { 564 if (const Value *RV = Call->getReturnedArgOperand()) { 565 V = RV; 566 continue; 567 } 568 // The result of launder.invariant.group must alias it's argument, 569 // but it can't be marked with returned attribute, that's why it needs 570 // special case. 571 if (StripKind == PSK_ZeroIndicesAndInvariantGroups && 572 (Call->getIntrinsicID() == Intrinsic::launder_invariant_group || 573 Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) { 574 V = Call->getArgOperand(0); 575 continue; 576 } 577 } 578 return V; 579 } 580 assert(V->getType()->isPointerTy() && "Unexpected operand type!"); 581 } while (Visited.insert(V).second); 582 583 return V; 584 } 585 } // end anonymous namespace 586 587 const Value *Value::stripPointerCasts() const { 588 return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this); 589 } 590 591 const Value *Value::stripPointerCastsAndAliases() const { 592 return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this); 593 } 594 595 const Value *Value::stripPointerCastsSameRepresentation() const { 596 return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this); 597 } 598 599 const Value *Value::stripInBoundsConstantOffsets() const { 600 return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this); 601 } 602 603 const Value *Value::stripPointerCastsAndInvariantGroups() const { 604 return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndInvariantGroups>(this); 605 } 606 607 const Value *Value::stripAndAccumulateConstantOffsets( 608 const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, 609 function_ref<bool(Value &, APInt &)> ExternalAnalysis) const { 610 if (!getType()->isPtrOrPtrVectorTy()) 611 return this; 612 613 unsigned BitWidth = Offset.getBitWidth(); 614 assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) && 615 "The offset bit width does not match the DL specification."); 616 617 // Even though we don't look through PHI nodes, we could be called on an 618 // instruction in an unreachable block, which may be on a cycle. 619 SmallPtrSet<const Value *, 4> Visited; 620 Visited.insert(this); 621 const Value *V = this; 622 do { 623 if (auto *GEP = dyn_cast<GEPOperator>(V)) { 624 // If in-bounds was requested, we do not strip non-in-bounds GEPs. 625 if (!AllowNonInbounds && !GEP->isInBounds()) 626 return V; 627 628 // If one of the values we have visited is an addrspacecast, then 629 // the pointer type of this GEP may be different from the type 630 // of the Ptr parameter which was passed to this function. This 631 // means when we construct GEPOffset, we need to use the size 632 // of GEP's pointer type rather than the size of the original 633 // pointer type. 634 APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0); 635 if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis)) 636 return V; 637 638 // Stop traversal if the pointer offset wouldn't fit in the bit-width 639 // provided by the Offset argument. This can happen due to AddrSpaceCast 640 // stripping. 641 if (GEPOffset.getMinSignedBits() > BitWidth) 642 return V; 643 644 // External Analysis can return a result higher/lower than the value 645 // represents. We need to detect overflow/underflow. 646 APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth); 647 if (!ExternalAnalysis) { 648 Offset += GEPOffsetST; 649 } else { 650 bool Overflow = false; 651 APInt OldOffset = Offset; 652 Offset = Offset.sadd_ov(GEPOffsetST, Overflow); 653 if (Overflow) { 654 Offset = OldOffset; 655 return V; 656 } 657 } 658 V = GEP->getPointerOperand(); 659 } else if (Operator::getOpcode(V) == Instruction::BitCast || 660 Operator::getOpcode(V) == Instruction::AddrSpaceCast) { 661 V = cast<Operator>(V)->getOperand(0); 662 } else if (auto *GA = dyn_cast<GlobalAlias>(V)) { 663 if (!GA->isInterposable()) 664 V = GA->getAliasee(); 665 } else if (const auto *Call = dyn_cast<CallBase>(V)) { 666 if (const Value *RV = Call->getReturnedArgOperand()) 667 V = RV; 668 } 669 assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!"); 670 } while (Visited.insert(V).second); 671 672 return V; 673 } 674 675 const Value * 676 Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const { 677 return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func); 678 } 679 680 uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL, 681 bool &CanBeNull) const { 682 assert(getType()->isPointerTy() && "must be pointer"); 683 684 uint64_t DerefBytes = 0; 685 CanBeNull = false; 686 if (const Argument *A = dyn_cast<Argument>(this)) { 687 DerefBytes = A->getDereferenceableBytes(); 688 if (DerefBytes == 0 && (A->hasByValAttr() || A->hasStructRetAttr())) { 689 Type *PT = cast<PointerType>(A->getType())->getElementType(); 690 if (PT->isSized()) 691 DerefBytes = DL.getTypeStoreSize(PT).getKnownMinSize(); 692 } 693 if (DerefBytes == 0) { 694 DerefBytes = A->getDereferenceableOrNullBytes(); 695 CanBeNull = true; 696 } 697 } else if (const auto *Call = dyn_cast<CallBase>(this)) { 698 DerefBytes = Call->getDereferenceableBytes(AttributeList::ReturnIndex); 699 if (DerefBytes == 0) { 700 DerefBytes = 701 Call->getDereferenceableOrNullBytes(AttributeList::ReturnIndex); 702 CanBeNull = true; 703 } 704 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) { 705 if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) { 706 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); 707 DerefBytes = CI->getLimitedValue(); 708 } 709 if (DerefBytes == 0) { 710 if (MDNode *MD = 711 LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) { 712 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); 713 DerefBytes = CI->getLimitedValue(); 714 } 715 CanBeNull = true; 716 } 717 } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) { 718 if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) { 719 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); 720 DerefBytes = CI->getLimitedValue(); 721 } 722 if (DerefBytes == 0) { 723 if (MDNode *MD = 724 IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) { 725 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); 726 DerefBytes = CI->getLimitedValue(); 727 } 728 CanBeNull = true; 729 } 730 } else if (auto *AI = dyn_cast<AllocaInst>(this)) { 731 if (!AI->isArrayAllocation()) { 732 DerefBytes = 733 DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinSize(); 734 CanBeNull = false; 735 } 736 } else if (auto *GV = dyn_cast<GlobalVariable>(this)) { 737 if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) { 738 // TODO: Don't outright reject hasExternalWeakLinkage but set the 739 // CanBeNull flag. 740 DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedSize(); 741 CanBeNull = false; 742 } 743 } 744 return DerefBytes; 745 } 746 747 Align Value::getPointerAlignment(const DataLayout &DL) const { 748 assert(getType()->isPointerTy() && "must be pointer"); 749 if (auto *GO = dyn_cast<GlobalObject>(this)) { 750 if (isa<Function>(GO)) { 751 Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne(); 752 switch (DL.getFunctionPtrAlignType()) { 753 case DataLayout::FunctionPtrAlignType::Independent: 754 return FunctionPtrAlign; 755 case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign: 756 return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne()); 757 } 758 llvm_unreachable("Unhandled FunctionPtrAlignType"); 759 } 760 const MaybeAlign Alignment(GO->getAlignment()); 761 if (!Alignment) { 762 if (auto *GVar = dyn_cast<GlobalVariable>(GO)) { 763 Type *ObjectType = GVar->getValueType(); 764 if (ObjectType->isSized()) { 765 // If the object is defined in the current Module, we'll be giving 766 // it the preferred alignment. Otherwise, we have to assume that it 767 // may only have the minimum ABI alignment. 768 if (GVar->isStrongDefinitionForLinker()) 769 return Align(DL.getPreferredAlignment(GVar)); 770 else 771 return DL.getABITypeAlign(ObjectType); 772 } 773 } 774 } 775 return Alignment.valueOrOne(); 776 } else if (const Argument *A = dyn_cast<Argument>(this)) { 777 const MaybeAlign Alignment = A->getParamAlign(); 778 if (!Alignment && A->hasStructRetAttr()) { 779 // An sret parameter has at least the ABI alignment of the return type. 780 Type *EltTy = cast<PointerType>(A->getType())->getElementType(); 781 if (EltTy->isSized()) 782 return DL.getABITypeAlign(EltTy); 783 } 784 return Alignment.valueOrOne(); 785 } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) { 786 return AI->getAlign(); 787 } else if (const auto *Call = dyn_cast<CallBase>(this)) { 788 MaybeAlign Alignment = Call->getRetAlign(); 789 if (!Alignment && Call->getCalledFunction()) 790 Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment(); 791 return Alignment.valueOrOne(); 792 } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) { 793 if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) { 794 ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0)); 795 return Align(CI->getLimitedValue()); 796 } 797 } else if (auto *CstPtr = dyn_cast<Constant>(this)) { 798 if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt( 799 const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()), 800 /*OnlyIfReduced=*/true))) { 801 size_t TrailingZeros = CstInt->getValue().countTrailingZeros(); 802 // While the actual alignment may be large, elsewhere we have 803 // an arbitrary upper alignmet limit, so let's clamp to it. 804 return Align(TrailingZeros < Value::MaxAlignmentExponent 805 ? uint64_t(1) << TrailingZeros 806 : Value::MaximumAlignment); 807 } 808 } 809 return Align(1); 810 } 811 812 const Value *Value::DoPHITranslation(const BasicBlock *CurBB, 813 const BasicBlock *PredBB) const { 814 auto *PN = dyn_cast<PHINode>(this); 815 if (PN && PN->getParent() == CurBB) 816 return PN->getIncomingValueForBlock(PredBB); 817 return this; 818 } 819 820 LLVMContext &Value::getContext() const { return VTy->getContext(); } 821 822 void Value::reverseUseList() { 823 if (!UseList || !UseList->Next) 824 // No need to reverse 0 or 1 uses. 825 return; 826 827 Use *Head = UseList; 828 Use *Current = UseList->Next; 829 Head->Next = nullptr; 830 while (Current) { 831 Use *Next = Current->Next; 832 Current->Next = Head; 833 Head->Prev = &Current->Next; 834 Head = Current; 835 Current = Next; 836 } 837 UseList = Head; 838 Head->Prev = &UseList; 839 } 840 841 bool Value::isSwiftError() const { 842 auto *Arg = dyn_cast<Argument>(this); 843 if (Arg) 844 return Arg->hasSwiftErrorAttr(); 845 auto *Alloca = dyn_cast<AllocaInst>(this); 846 if (!Alloca) 847 return false; 848 return Alloca->isSwiftError(); 849 } 850 851 //===----------------------------------------------------------------------===// 852 // ValueHandleBase Class 853 //===----------------------------------------------------------------------===// 854 855 void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) { 856 assert(List && "Handle list is null?"); 857 858 // Splice ourselves into the list. 859 Next = *List; 860 *List = this; 861 setPrevPtr(List); 862 if (Next) { 863 Next->setPrevPtr(&Next); 864 assert(getValPtr() == Next->getValPtr() && "Added to wrong list?"); 865 } 866 } 867 868 void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) { 869 assert(List && "Must insert after existing node"); 870 871 Next = List->Next; 872 setPrevPtr(&List->Next); 873 List->Next = this; 874 if (Next) 875 Next->setPrevPtr(&Next); 876 } 877 878 void ValueHandleBase::AddToUseList() { 879 assert(getValPtr() && "Null pointer doesn't have a use list!"); 880 881 LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl; 882 883 if (getValPtr()->HasValueHandle) { 884 // If this value already has a ValueHandle, then it must be in the 885 // ValueHandles map already. 886 ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()]; 887 assert(Entry && "Value doesn't have any handles?"); 888 AddToExistingUseList(&Entry); 889 return; 890 } 891 892 // Ok, it doesn't have any handles yet, so we must insert it into the 893 // DenseMap. However, doing this insertion could cause the DenseMap to 894 // reallocate itself, which would invalidate all of the PrevP pointers that 895 // point into the old table. Handle this by checking for reallocation and 896 // updating the stale pointers only if needed. 897 DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles; 898 const void *OldBucketPtr = Handles.getPointerIntoBucketsArray(); 899 900 ValueHandleBase *&Entry = Handles[getValPtr()]; 901 assert(!Entry && "Value really did already have handles?"); 902 AddToExistingUseList(&Entry); 903 getValPtr()->HasValueHandle = true; 904 905 // If reallocation didn't happen or if this was the first insertion, don't 906 // walk the table. 907 if (Handles.isPointerIntoBucketsArray(OldBucketPtr) || 908 Handles.size() == 1) { 909 return; 910 } 911 912 // Okay, reallocation did happen. Fix the Prev Pointers. 913 for (DenseMap<Value*, ValueHandleBase*>::iterator I = Handles.begin(), 914 E = Handles.end(); I != E; ++I) { 915 assert(I->second && I->first == I->second->getValPtr() && 916 "List invariant broken!"); 917 I->second->setPrevPtr(&I->second); 918 } 919 } 920 921 void ValueHandleBase::RemoveFromUseList() { 922 assert(getValPtr() && getValPtr()->HasValueHandle && 923 "Pointer doesn't have a use list!"); 924 925 // Unlink this from its use list. 926 ValueHandleBase **PrevPtr = getPrevPtr(); 927 assert(*PrevPtr == this && "List invariant broken"); 928 929 *PrevPtr = Next; 930 if (Next) { 931 assert(Next->getPrevPtr() == &Next && "List invariant broken"); 932 Next->setPrevPtr(PrevPtr); 933 return; 934 } 935 936 // If the Next pointer was null, then it is possible that this was the last 937 // ValueHandle watching VP. If so, delete its entry from the ValueHandles 938 // map. 939 LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl; 940 DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles; 941 if (Handles.isPointerIntoBucketsArray(PrevPtr)) { 942 Handles.erase(getValPtr()); 943 getValPtr()->HasValueHandle = false; 944 } 945 } 946 947 void ValueHandleBase::ValueIsDeleted(Value *V) { 948 assert(V->HasValueHandle && "Should only be called if ValueHandles present"); 949 950 // Get the linked list base, which is guaranteed to exist since the 951 // HasValueHandle flag is set. 952 LLVMContextImpl *pImpl = V->getContext().pImpl; 953 ValueHandleBase *Entry = pImpl->ValueHandles[V]; 954 assert(Entry && "Value bit set but no entries exist"); 955 956 // We use a local ValueHandleBase as an iterator so that ValueHandles can add 957 // and remove themselves from the list without breaking our iteration. This 958 // is not really an AssertingVH; we just have to give ValueHandleBase a kind. 959 // Note that we deliberately do not the support the case when dropping a value 960 // handle results in a new value handle being permanently added to the list 961 // (as might occur in theory for CallbackVH's): the new value handle will not 962 // be processed and the checking code will mete out righteous punishment if 963 // the handle is still present once we have finished processing all the other 964 // value handles (it is fine to momentarily add then remove a value handle). 965 for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) { 966 Iterator.RemoveFromUseList(); 967 Iterator.AddToExistingUseListAfter(Entry); 968 assert(Entry->Next == &Iterator && "Loop invariant broken."); 969 970 switch (Entry->getKind()) { 971 case Assert: 972 break; 973 case Weak: 974 case WeakTracking: 975 // WeakTracking and Weak just go to null, which unlinks them 976 // from the list. 977 Entry->operator=(nullptr); 978 break; 979 case Callback: 980 // Forward to the subclass's implementation. 981 static_cast<CallbackVH*>(Entry)->deleted(); 982 break; 983 } 984 } 985 986 // All callbacks, weak references, and assertingVHs should be dropped by now. 987 if (V->HasValueHandle) { 988 #ifndef NDEBUG // Only in +Asserts mode... 989 dbgs() << "While deleting: " << *V->getType() << " %" << V->getName() 990 << "\n"; 991 if (pImpl->ValueHandles[V]->getKind() == Assert) 992 llvm_unreachable("An asserting value handle still pointed to this" 993 " value!"); 994 995 #endif 996 llvm_unreachable("All references to V were not removed?"); 997 } 998 } 999 1000 void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) { 1001 assert(Old->HasValueHandle &&"Should only be called if ValueHandles present"); 1002 assert(Old != New && "Changing value into itself!"); 1003 assert(Old->getType() == New->getType() && 1004 "replaceAllUses of value with new value of different type!"); 1005 1006 // Get the linked list base, which is guaranteed to exist since the 1007 // HasValueHandle flag is set. 1008 LLVMContextImpl *pImpl = Old->getContext().pImpl; 1009 ValueHandleBase *Entry = pImpl->ValueHandles[Old]; 1010 1011 assert(Entry && "Value bit set but no entries exist"); 1012 1013 // We use a local ValueHandleBase as an iterator so that 1014 // ValueHandles can add and remove themselves from the list without 1015 // breaking our iteration. This is not really an AssertingVH; we 1016 // just have to give ValueHandleBase some kind. 1017 for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) { 1018 Iterator.RemoveFromUseList(); 1019 Iterator.AddToExistingUseListAfter(Entry); 1020 assert(Entry->Next == &Iterator && "Loop invariant broken."); 1021 1022 switch (Entry->getKind()) { 1023 case Assert: 1024 case Weak: 1025 // Asserting and Weak handles do not follow RAUW implicitly. 1026 break; 1027 case WeakTracking: 1028 // Weak goes to the new value, which will unlink it from Old's list. 1029 Entry->operator=(New); 1030 break; 1031 case Callback: 1032 // Forward to the subclass's implementation. 1033 static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New); 1034 break; 1035 } 1036 } 1037 1038 #ifndef NDEBUG 1039 // If any new weak value handles were added while processing the 1040 // list, then complain about it now. 1041 if (Old->HasValueHandle) 1042 for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next) 1043 switch (Entry->getKind()) { 1044 case WeakTracking: 1045 dbgs() << "After RAUW from " << *Old->getType() << " %" 1046 << Old->getName() << " to " << *New->getType() << " %" 1047 << New->getName() << "\n"; 1048 llvm_unreachable( 1049 "A weak tracking value handle still pointed to the old value!\n"); 1050 default: 1051 break; 1052 } 1053 #endif 1054 } 1055 1056 // Pin the vtable to this file. 1057 void CallbackVH::anchor() {} 1058