//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils ---------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Out-of-line definitions to anchor the vtables for these classes.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}
void ValueMaterializer::materializeInitFor(GlobalValue *New, GlobalValue *Old) {
}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  // Explicit move for MSVC.
  DelayedBasicBlock(DelayedBasicBlock &&X)
      : OldBB(std::move(X.OldBB)), TempBB(std::move(X.TempBB)) {}
  DelayedBasicBlock &operator=(DelayedBasicBlock &&X) {
    OldBB = std::move(X.OldBB);
    TempBB = std::move(X.TempBB);
    return *this;
  }

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapGlobalAliasee,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct GlobalAliaseeTy {
    GlobalAlias *GA;
    Constant *Aliasee;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    GlobalAliaseeTy GlobalAliasee;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};
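// How mapping contexts are used, as a hedged sketch (the names AltVM,
// AltMaterializer, GV, and Init below are hypothetical): the Mapper below owns
// one MappingContext per value map; clients register extra contexts and tag
// scheduled work with the returned index, e.g.:
//
//   unsigned AltMCID =
//       M.registerAlternateMappingContext(AltVM, &AltMaterializer);
//   M.scheduleMapGlobalInitializer(GV, Init, AltMCID);
//
// flush() then switches CurrentMCID to each entry's context before mapping it.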

class MDNodeMapper;
class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD.  Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  // Map LocalAsMetadata, which never gets memoized.
  //
  // If the referenced local is not mapped, the principled return is nullptr.
  // However, optimization passes sometimes move metadata operands *before* the
  // SSA values they reference.  To prevent crashes in \a RemapInstruction(),
  // return "!{}" when RF_IgnoreMissingLocals is not set.
  //
  // \note Adding a mapping for LocalAsMetadata is unsupported.  Add a mapping
  // to the value map for the SSA value in question instead.
  //
  // FIXME: Once we have a verifier check for forward references to SSA values
  // through metadata operands, always return nullptr on unmapped locals.
  Metadata *mapLocalAsMetadata(const LocalAsMetadata &LAM);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapGlobalInitializer(GlobalVariable &GV, Constant &Init);
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);
  void mapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee);
  void remapFunction(Function &F, ValueToValueMapTy &VM);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  Optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = ~0u;
    TempMDNode Placeholder;

    Data() {}
    Data(Data &&X)
        : HasChanged(std::move(X.HasChanged)), ID(std::move(X.ID)),
          Placeholder(std::move(X.Placeholder)) {}
    Data &operator=(Data &&X) {
      HasChanged = std::move(X.HasChanged);
      ID = std::move(X.ID);
      Placeholder = std::move(X.Placeholder);
      return *this;
    }
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N.  The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode().  Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk.  Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
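  ///
  /// A minimal usage sketch (illustrative only; the surrounding Mapper \c M
  /// and the node \c N are assumed to exist in the caller's context):
  ///
  /// \code
  ///   MDNodeMapper NodeMapper(M);
  ///   Metadata *MappedN = NodeMapper.map(N);
  ///   // MappedN is resolved; any distinct nodes reached along the way have
  ///   // been queued and remapped before map() returns.
  /// \endcode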
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping.  Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  ///  1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///     save the post-order traversal in the given \a UniquedGraph, tracking
  ///     whether any node's operands change.
  ///
  ///  2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///     through the \a UniquedGraph until fixed point, following the rule
  ///     that if a node changes, any node that references it must also change.
  ///
  ///  3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///     (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping.  If it's not an \a
  /// MDNode, compute and return the mapping.  If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return None if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns None if this returns None.
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN.  It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns None for nodes in \c G, but not for any of
  /// their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  /// operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end namespace

Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV =
            Materializer->materializeDeclFor(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      if (auto *NewGV = dyn_cast<GlobalValue>(NewV))
        Materializer->materializeInitFor(
            NewGV, cast<GlobalValue>(const_cast<Value *>(V)));
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is a module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant*>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  // Otherwise, we have some other constant to remap.  Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValue(Op);
    if (Mapped != Op) break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant.  We've already processed some or
  // all of the operands, set them all up now.
  SmallVector<Constant*, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands was remapped, push it and then map the rest.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo)
      Ops.push_back(cast<Constant>(mapValue(C->getOperand(OpNo))));
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}
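// A hedged usage sketch of the constant path above, via the public wrapper
// (OldGV, NewGV, and CE are hypothetical names): seeding the map with a global
// replacement is enough for constant expressions that use it to be rebuilt.
//
//   ValueToValueMapTy VM;
//   VM[OldGV] = NewGV;                          // seed the replacement
//   ValueMapper Mapper(VM);
//   Constant *NewCE = Mapper.mapConstant(*CE);  // CE uses OldGV; NewCE, NewGV
//
// Constants whose type and operands all map to themselves get an identity
// mapping instead of being recreated.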

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have had its body materialized yet.  In that case, create a
  // dummy basic block for now, and replace it in flush() once everything has
  // been materialized.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(cast<MDNode>(
      (M.Flags & RF_MoveDistinctMDs)
          ? M.mapToSelf(&N)
          : M.mapToMetadata(&N, MDNode::replaceWithDistinct(N.clone()))));
  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;

  // The flag on the worklist indicates whether this is the first or second
  // visit of a node.  The first visit looks through the operands; the second
  // visit adds the node to POT.
  SmallVector<std::pair<MDNode *, bool>, 16> Worklist;
  Worklist.push_back(std::make_pair(&const_cast<MDNode &>(FirstN), false));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    MDNode &N = *Worklist.back().first;
    if (Worklist.back().second) {
      // We've already visited operands.  Add this to POT.
      Worklist.pop_back();
      G.Info[&N].ID = G.POT.size();
      G.POT.push_back(&N);
      continue;
    }
    Worklist.back().second = true;

    // Look through the operands for changes, pushing unmapped uniqued nodes
    // onto the worklist.
    assert(N.isUniqued() && "Expected only uniqued nodes in POT");
    bool LocalChanges = false;
    for (Metadata *Op : N.operands()) {
      assert(Op != &N && "Uniqued nodes cannot have self-references");
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
        AnyChanges |= LocalChanges |= Op != *MappedOp;
        continue;
      }

      MDNode &OpN = *cast<MDNode>(Op);
      assert(OpN.isUniqued() &&
             "Only uniqued operands cannot be mapped immediately");
      if (G.Info.insert(std::make_pair(&OpN, Data())).second)
        Worklist.push_back(std::make_pair(&OpN, false));
    }

    if (LocalChanges)
      G.Info[&N].HasChanged = true;
  }
  return AnyChanges;
}
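// Worked example for createPOT(), on a hypothetical uniqued graph
//   !A = !{!B}, !B = !{!C}, !C = !{i32* @G}
// where @G already has a (different) mapping: the walk pushes !A, !B, !C and
// produces POT = [!C, !B, !A].  Only !C gets HasChanged set here, because only
// its ConstantAsMetadata operand could be mapped immediately; !B and !A are
// updated later by propagateChanges().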

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (!llvm::any_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}
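// Continuing the sketch above with POT = [!C, !B, !A] and only !C marked: a
// single post-order sweep marks !B (its operand !C changed) and then !A (its
// operand !B changed); the second sweep changes nothing, so the loop stops.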

void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}
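// Sketch of the cycle case (hypothetical nodes): if uniqued !X and !Y
// reference each other and both change, whichever is cloned first receives a
// temporary placeholder for the other from getFwdReference().  When that
// placeholder is itself uniqued via MDNode::replaceWithUniqued(), its uses are
// redirected to the real mapped node, and the loop above resolves any nodes
// still flagged as part of a uniquing cycle.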

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

namespace {

struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }
  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

} // end namespace

Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is module-level metadata.  If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());

    // Don't memoize ConstantAsMetadata.  Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed.  These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return None;
}

Metadata *Mapper::mapLocalAsMetadata(const LocalAsMetadata &LAM) {
  // Lookup the mapping for the value itself, and return the appropriate
  // metadata.
  if (Value *V = mapValue(LAM.getValue())) {
    if (V == LAM.getValue())
      return const_cast<LocalAsMetadata *>(&LAM);
    return ValueAsMetadata::get(V);
  }

  // FIXME: always return nullptr once Verifier::verifyDominatesUse() ensures
  // metadata operands only reference defined SSA values.
  return (Flags & RF_IgnoreMissingLocals)
             ? nullptr
             : MDTuple::get(LAM.getContext(), None);
}
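// Illustrative effect (hypothetical IR): given an operand `metadata i32 %x`
// where %x has no mapping, the result is the empty tuple !{} unless
// RF_IgnoreMissingLocals is set, in which case nullptr is returned and the
// caller leaves the operand alone.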

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}
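// A hedged usage sketch via the public wrapper (VM is assumed to have been
// filled in by a cloning pass; ClonedInst is hypothetical):
//
//   ValueToValueMapTy VM;                  // old-value -> new-value mappings
//   ValueMapper(VM, RF_IgnoreMissingLocals).remapInstruction(*ClonedInst);
//
// With RF_IgnoreMissingLocals set, operands that have no mapping are left
// untouched instead of tripping the assertions above.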

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  F.getAllMetadata(MDs);
  for (const auto &I : MDs)
    F.setMetadata(I.first, cast_or_null<MDNode>(mapMetadata(I.second)));

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API.  See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = mapValue(S->getOperand(0));
      auto *E2 = mapValue(S->getOperand(1));
      Value *Null = Constant::getNullValue(VoidPtrTy);
      NewV =
          ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null, nullptr);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}
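// Illustrative upgrade for the IsOldCtorDtor path (hypothetical module): an
// old-style llvm.global_ctors entry of type { i32, void ()* } is rebuilt here
// as the three-field form { i32, void ()*, i8* } with a null data pointer
// appended; see the FIXME above about IRLinker::linkAppendingVarProto().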

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }
  ~FlushingMapper() { M.flush(); }
  Mapper *operator->() const { return &M; }
};

} // end namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}
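// Typical end-to-end use of the public ValueMapper API, as a minimal sketch
// (OldArg, NewArg, NewF, and OldMD are hypothetical names assumed to be set up
// by the caller, e.g. by a cloning pass):
//
//   ValueToValueMapTy VM;
//   VM[OldArg] = NewArg;                   // seed replacements up front
//
//   ValueMapper Mapper(VM, RF_IgnoreMissingLocals);
//   for (BasicBlock &BB : *NewF)
//     for (Instruction &I : BB)
//       Mapper.remapInstruction(I);        // each call flushes pending work
//
//   MDNode *NewMD = Mapper.mapMDNode(*OldMD);  // metadata can be mapped too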