//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Out of line method to get vtable etc for class.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapGlobalAliasee,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct GlobalAliaseeTy {
    GlobalAlias *GA;
    Constant *Aliasee;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    GlobalAliaseeTy GlobalAliasee;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class MDNodeMapper;
class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  void remapGlobalObjectMetadata(GlobalObject &GO);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapGlobalInitializer(GlobalVariable &GV, Constant &Init);
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);
  void mapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee);
  void remapFunction(Function &F, ValueToValueMapTy &VM);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  Optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = ~0u;
    TempMDNode Placeholder;
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  ///  1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///     save the post-order traversal in the given \a UniquedGraph, tracking
  ///     whether any node's operands change.
  ///
  ///  2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///     through the \a UniquedGraph until fixed point, following the rule
  ///     that if a node changes, any node that references it must also change.
  ///
  ///  3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///     (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return None if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns None if this returns None.
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  ///       to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where the traversal of the operands should resume.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns None for nodes in \c G, but not for any of
  ///      their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  ///      operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end namespace
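
// An illustrative sketch, deliberately kept out of the build, of the
// identity-mapping fast path described in the MDNodeMapper comments above, as
// seen through the public ValueMapper interface. The function name and the
// "tag" string are assumptions made only for this example.
#if 0
static void exampleIdentityMappedUniquedNode(LLVMContext &Ctx) {
  // A uniqued node whose only operand is an MDString has nothing that can be
  // remapped: createPOT() records no changed operands, so
  // mapTopLevelUniquedNode() maps each node in the traversal to itself and
  // mapMDNode() hands back the original node.
  Metadata *Ops[] = {MDString::get(Ctx, "tag")};
  MDNode *N = MDNode::get(Ctx, Ops);

  ValueToValueMapTy VM;
  ValueMapper Mapper(VM, RF_None);
  assert(Mapper.mapMDNode(*N) == N && "expected the identity mapping");
}
#endif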

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this must either be a constant (which may or may not be mappable) or
  // something that is not in the mapping table.
  Constant *C = const_cast<Constant *>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands; set them all up now.
  SmallVector<Constant *, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatches, push it and the other mapped operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have its body materialized yet. In that case, create a dummy
  // basic block for now, and replace it in flush() once all the global values
  // have been handled.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(cast<MDNode>(
      (M.Flags & RF_MoveDistinctMDs)
          ? M.mapToSelf(&N)
          : M.mapToMetadata(&N, MDNode::replaceWithDistinct(N.clone()))));
  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {
/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};
} // end namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (none_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      (void)D;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

namespace {

struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }
  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

} // end namespace

Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());

    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return None;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      remapGlobalObjectMetadata(*E.Data.GVInit.GV);
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapGlobalObjectMetadata(GlobalObject &GO) {
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  GO.getAllMetadata(MDs);
  GO.clearMetadata();
  for (const auto &I : MDs)
    GO.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  remapGlobalObjectMetadata(F);

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = cast<Constant>(mapValue(S->getOperand(0)));
      auto *E2 = cast<Constant>(mapValue(S->getOperand(1)));
      Constant *Null = Constant::getNullValue(VoidPtrTy);
      NewV = ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }
  ~FlushingMapper() { M.flush(); }
  Mapper *operator->() const { return &M; }
};

} // end namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}
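
// An illustrative sketch, deliberately kept out of the build, showing how a
// client might drive the flushing entry points defined above. The helper name
// remapClonedInstructions and its parameters are assumptions made only for
// this example; everything it calls is part of the public ValueMapper
// interface implemented in this file.
#if 0
static void remapClonedInstructions(Function &NewF, ValueToValueMapTy &VM) {
  // Reuse one ValueMapper so every call shares the same value map.  Each
  // public call flushes any pending global-value work before it returns, and
  // RF_IgnoreMissingLocals tolerates local operands that have no entry in VM.
  ValueMapper Mapper(VM, RF_IgnoreMissingLocals);
  for (BasicBlock &BB : NewF)
    for (Instruction &I : BB)
      Mapper.remapInstruction(I);
}
#endif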