//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalObject.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include <cassert>
#include <limits>
#include <memory>
#include <utility>

using namespace llvm;

// Out of line method to get vtable etc for class.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapGlobalAliasee,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct GlobalAliaseeTy {
    GlobalAlias *GA;
    Constant *Aliasee;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    GlobalAliaseeTy GlobalAliasee;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  void remapGlobalObjectMetadata(GlobalObject &GO);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapGlobalInitializer(GlobalVariable &GV, Constant &Init);
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);
  void mapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee);
  void remapFunction(Function &F, ValueToValueMapTy &VM);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  Optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = std::numeric_limits<unsigned>::max();
    TempMDNode Placeholder;
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };
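
  // Illustrative sketch of propagateChanges() (the node names here are
  // hypothetical, not taken from this file): given a POT of [C, B, A], where
  // A's operand is B and B's operand is C, and where only C starts out with
  // Data::HasChanged set (say its operand is a ConstantAsMetadata wrapping a
  // remapped GlobalVariable), a single pass over the POT marks B and then A
  // as changed, because operands precede their users in post-order. The
  // outer fixed-point loop only needs another pass when a uniquing cycle
  // makes a user appear before one of its operands.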

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  /// 1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///    save the post-order traversal in the given \a UniquedGraph, tracking
  ///    whether each node's operands change.
  ///
  /// 2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///    through the \a UniquedGraph until fixed point, following the rule
  ///    that if a node changes, any node that references it must also change.
  ///
  /// 3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///    (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return None if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns None if this returns None.
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where to continue the loop through the operands.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns None for nodes in \c G, but not for any of
  /// their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  /// operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end anonymous namespace

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is a module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant *>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands, set them all up now.
  SmallVector<Constant *, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatched, push it and the other mapped operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its initializer. In that case, create a
  // dummy basic block for now, and replace it once we've materialized all
  // the initializers.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}

static Metadata *cloneOrBuildODR(const MDNode &N) {
  auto *CT = dyn_cast<DICompositeType>(&N);
  // If ODR type uniquing is enabled, we would have uniqued composite types
  // with identifiers during bitcode reading, so we can just use CT.
  if (CT && CT->getContext().isODRUniquingDebugTypes() &&
      CT->getIdentifier() != "")
    return const_cast<DICompositeType *>(CT);
  return MDNode::replaceWithDistinct(N.clone());
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(
      cast<MDNode>((M.Flags & RF_MoveDistinctMDs)
                       ? M.mapToSelf(&N)
                       : M.mapToMetadata(&N, cloneOrBuildODR(N))));
  return DistinctWorklist.back();
}

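// A note on RF_MoveDistinctMDs, used just above: when the flag is set, the
// original distinct node is reused as its own mapping (an identity mapping),
// so distinct metadata is moved rather than copied; when it is clear,
// cloneOrBuildODR() either clones a fresh distinct node or, for ODR-uniqued
// composite types that carry an identifier, reuses the existing
// DICompositeType. Callers such as the IR linker typically set the flag when
// metadata is being moved wholesale out of a source module.
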
static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {

/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};

} // end anonymous namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

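// For intuition, a small hypothetical run of the loop above: with FirstN = A
// and uniqued operands A -> {B, C}, the worklist evolves as [A] -> [A, B]
// (B found unvisited) -> [A] (B exhausted, pushed onto the POT) -> [A, C] ->
// [A] (C pushed) -> [] (A pushed). The resulting POT is [B, C, A], ending
// with FirstN as promised by the \post condition on createPOT().
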
MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (llvm::none_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      (void)D;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

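// A sketch of how the two strategies in map() combine (the nodes here are
// hypothetical): mapping a distinct node D whose only operand is a uniqued
// node U that wraps a remapped constant proceeds as
//
//   map(D):  mapDistinctNode(D) maps D immediately and pushes it onto
//            DistinctWorklist;
//   pop D:   remapOperands(D) finds U uniqued and unmapped, so it calls
//            mapTopLevelUniquedNode(U), which builds a one-node POT, sees the
//            changed ConstantAsMetadata operand, and re-uniques U with the
//            mapped constant.
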
Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

namespace {

struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }

  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

} // end anonymous namespace

Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is a module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());

    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return None;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      remapGlobalObjectMetadata(*E.Data.GVInit.GV);
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

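// Worked example of the scheduling protocol that feeds flush() (the caller
// shown is hypothetical): a client typically writes
//
//   Mapper.scheduleMapGlobalInitializer(NewGV, *OldGV->getInitializer(), 0);
//
// which only records a WorklistEntry. The initializer is not actually mapped
// until the next flushing entry point runs, so mutually referencing globals
// can all be scheduled before any of them is visited.
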
void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapGlobalObjectMetadata(GlobalObject &GO) {
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  GO.getAllMetadata(MDs);
  GO.clearMetadata();
  for (const auto &I : MDs)
    GO.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  remapGlobalObjectMetadata(F);

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = cast<Constant>(mapValue(S->getOperand(0)));
      auto *E2 = cast<Constant>(mapValue(S->getOperand(1)));
      Constant *Null = Constant::getNullValue(VoidPtrTy);
      NewV = ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }

  ~FlushingMapper() { M.flush(); }

  Mapper *operator->() const { return &M; }
};

} // end anonymous namespace

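// A minimal usage sketch of the public API defined below (the function and
// value names are hypothetical; this assumes the caller has already seeded
// VM with the values it wants redirected):
//
//   ValueToValueMapTy VM;
//   VM[OldArg] = NewArg;
//   ValueMapper Mapper(VM, RF_IgnoreMissingLocals);
//   for (BasicBlock &BB : NewF)
//     for (Instruction &I : BB)
//       Mapper.remapInstruction(I);
//
// Each of the mapValue/mapMetadata/remap* entry points below flushes any
// scheduled worklist entries on exit via FlushingMapper, while the schedule*
// entry points defer their work until that next flush.
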
ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}