//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Out-of-line methods to get the vtable etc. for these classes.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}
void ValueMaterializer::materializeInitFor(GlobalValue *New, GlobalValue *Old) {
}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  // Explicit move for MSVC.
  DelayedBasicBlock(DelayedBasicBlock &&X)
      : OldBB(std::move(X.OldBB)), TempBB(std::move(X.TempBB)) {}
  DelayedBasicBlock &operator=(DelayedBasicBlock &&X) {
    OldBB = std::move(X.OldBB);
    TempBB = std::move(X.TempBB);
    return *this;
  }

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapGlobalAliasee,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct GlobalAliaseeTy {
    GlobalAlias *GA;
    Constant *Aliasee;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    GlobalAliaseeTy GlobalAliasee;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
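  /// The materializer is optional; when it is null, values are never
  /// materialized on demand during mapping.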
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class MDNodeMapper;
class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  // Map LocalAsMetadata, which never gets memoized.
  //
  // If the referenced local is not mapped, the principled return is nullptr.
  // However, optimization passes sometimes move metadata operands *before* the
  // SSA values they reference. To prevent crashes in \a RemapInstruction(),
  // return "!{}" when RF_IgnoreMissingLocals is not set.
  //
  // \note Adding a mapping for LocalAsMetadata is unsupported. Add a mapping
  // to the value map for the SSA value in question instead.
  //
  // FIXME: Once we have a verifier check for forward references to SSA values
  // through metadata operands, always return nullptr on unmapped locals.
  Metadata *mapLocalAsMetadata(const LocalAsMetadata &LAM);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapGlobalInitializer(GlobalVariable &GV, Constant &Init);
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);
  void mapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee);
  void remapFunction(Function &F, ValueToValueMapTy &VM);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
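  ///
  /// Handles already-mapped metadata, \a MDString, \a ConstantAsMetadata, and
  /// the \a RF_NoModuleLevelChanges identity mapping; returns None for any
  /// other \a MDNode, which still needs to be visited.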
  Optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = ~0u;
    TempMDNode Placeholder;

    Data() {}
    Data(Data &&X)
        : HasChanged(std::move(X.HasChanged)), ID(std::move(X.ID)),
          Placeholder(std::move(X.Placeholder)) {}
    Data &operator=(Data &&X) {
      HasChanged = std::move(X.HasChanged);
      ID = std::move(X.ID);
      Placeholder = std::move(X.Placeholder);
      return *this;
    }
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  ///  1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///     save the post-order traversal in the given \a UniquedGraph, tracking
  ///     whether any node's operands change.
  ///
  ///  2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///     through the \a UniquedGraph until fixed point, following the rule
  ///     that if a node changes, any node that references it must also change.
  ///
  ///  3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///     (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return None if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns None if this returns None.
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where to continue the loop through the operands.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns None for nodes in \c G, but not for any of
  /// their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  /// operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end namespace

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV =
            Materializer->materializeDeclFor(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      if (auto *NewGV = dyn_cast<GlobalValue>(NewV))
        Materializer->materializeInitFor(
            NewGV, cast<GlobalValue>(const_cast<Value *>(V)));
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is a module-level metadata and we know that nothing at the
    // module level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant *>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValue(Op);
    if (Mapped != Op) break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands, set them all up now.
  SmallVector<Constant *, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatch, push it and the other mapped operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo)
      Ops.push_back(cast<Constant>(mapValue(C->getOperand(OpNo))));
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its initializer. In that case, create a
  // dummy basic block for now, and replace it once we've materialized all
  // the initializers.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(cast<MDNode>(
      (M.Flags & RF_MoveDistinctMDs)
          ? M.mapToSelf(&N)
          : M.mapToMetadata(&N, MDNode::replaceWithDistinct(N.clone()))));
  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {
/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};
} // end namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
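    // Simple metadata and distinct nodes map immediately; only unmapped
    // uniqued nodes fall through to be added to the traversal below.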
    if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (!llvm::any_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

namespace {

struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }
  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

} // end namespace

Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is a module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());

    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return None;
}

Metadata *Mapper::mapLocalAsMetadata(const LocalAsMetadata &LAM) {
  // Lookup the mapping for the value itself, and return the appropriate
  // metadata.
  if (Value *V = mapValue(LAM.getValue())) {
    if (V == LAM.getValue())
      return const_cast<LocalAsMetadata *>(&LAM);
    return ValueAsMetadata::get(V);
  }

  // FIXME: always return nullptr once Verifier::verifyDominatesUse() ensures
  // metadata operands only reference defined SSA values.
  return (Flags & RF_IgnoreMissingLocals)
             ? nullptr
             : MDTuple::get(LAM.getContext(), None);
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  F.getAllMetadata(MDs);
  for (const auto &I : MDs)
    F.setMetadata(I.first, cast_or_null<MDNode>(mapMetadata(I.second)));

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
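    // Build the new element type: the old two-field ctor/dtor struct with an
    // i8* member appended.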
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = mapValue(S->getOperand(0));
      auto *E2 = mapValue(S->getOperand(1));
      Value *Null = Constant::getNullValue(VoidPtrTy);
      NewV =
          ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null, nullptr);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }
  ~FlushingMapper() { M.flush(); }
  Mapper *operator->() const { return &M; }
};

} // end namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}