//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via !type metadata) that the list of callees is fixed. This
// includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class and
//   each list of constant arguments: evaluate the function, store the return
//   value alongside the virtual table, and rewrite each virtual call as a load
//   from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value, replace
//   each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
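//
// As an illustrative example (a hypothetical hierarchy, not drawn from any
// particular program), given:
//
//   struct A { virtual bool f() { return true; } };
//   struct B : A { bool f() override { return true; } };
//   struct C : A { bool f() override { return false; } };
//
// a call "a->f()" through an A* has three possible callees, so single
// implementation devirtualization does not apply, but because every callee
// is readnone and returns a constant, virtual constant propagation does. If
// C did not exist, every callee would return true and the uniform return
// value optimization would fold the call to the constant true; with C
// present, C's is the unique implementation returning 0, so the unique
// return value optimization rewrites the call to a comparison of the vptr
// against C's vtable address.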
//
// This pass is intended to be used during the regular and thin LTO pipelines.
// During regular LTO, the pass determines the best optimization for each
// virtual call and applies the resolutions directly to virtual calls that are
// eligible for virtual call optimization (i.e. calls that use either the
// llvm.assume(llvm.type.test) intrinsic pattern or the llvm.type.checked.load
// intrinsic). During ThinLTO, the pass operates in two phases:
// - Export phase: this is run during the thin link over a single merged module
//   that contains all vtables with !type metadata that participate in the link.
//   The pass computes a resolution for each virtual call and stores it in the
//   type identifier summary.
// - Import phase: this is run during the thin backends over the individual
//   modules. The pass applies the resolutions previously computed during the
//   export phase to each eligible virtual call.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSummaryIndexYAML.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/PassSupport.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/IPO/FunctionAttrs.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include <algorithm>
#include <cstddef>
#include <map>
#include <set>
#include <string>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

static cl::opt<PassSummaryAction> ClSummaryAction(
    "wholeprogramdevirt-summary-action",
    cl::desc("What to do with the summary when running this pass"),
    cl::values(clEnumValN(PassSummaryAction::None, "none", "Do nothing"),
               clEnumValN(PassSummaryAction::Import, "import",
                          "Import typeid resolutions from summary and globals"),
               clEnumValN(PassSummaryAction::Export, "export",
                          "Export typeid resolutions to summary and globals")),
    cl::Hidden);

static cl::opt<std::string> ClReadSummary(
    "wholeprogramdevirt-read-summary",
    cl::desc("Read summary from given YAML file before running pass"),
    cl::Hidden);

static cl::opt<std::string> ClWriteSummary(
    "wholeprogramdevirt-write-summary",
    cl::desc("Write summary to given YAML file after running pass"),
    cl::Hidden);

static cl::opt<unsigned>
    ClThreshold("wholeprogramdevirt-branch-funnel-threshold", cl::Hidden,
                cl::init(10), cl::ZeroOrMore,
                cl::desc("Maximum number of call targets per "
                         "call site to enable branch funnels"));
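
// For testing, the pass can be driven directly from the command line using
// the options above; for example (file names are illustrative):
//
//   opt -wholeprogramdevirt -wholeprogramdevirt-summary-action=export \
//       -wholeprogramdevirt-write-summary=summary.yaml module.bc
//
// computes resolutions over module.bc and writes them out as YAML (see
// DevirtModule::runForTesting below).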

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of the
  // used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte. Effectively,
  // this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
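  //
  // As a worked example (illustrative values): if Size == 1 and the slices
  // computed below are Used == { {0xff, 0x0f}, {0xff} }, then at I == 0 we
  // get BitsUsed == 0xff (no free bit), and at I == 1 we get BitsUsed ==
  // 0x0f, so the search returns (MinByte + 1) * 8 + countTrailingZeros(0xf0)
  // == (MinByte + 1) * 8 + 4, the first free bit in the second byte.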
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.TM->Bits->After.BytesUsed
                                       : Target.TM->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}
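
// As a sanity check on the arithmetic above (illustrative values): for a
// 1-bit return value allocated before the address point at bit position
// AllocBefore == 10, setBeforeReturnValues computes OffsetByte ==
// -(10 / 8 + 1) == -2 and OffsetBit == 10 % 8 == 2, i.e. bit 2 of the second
// byte before the address point; for AllocAfter == 10, setAfterReturnValues
// computes OffsetByte == 10 / 8 == 1 and OffsetBit == 2.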

VirtualCallTarget::VirtualCallTarget(Function *Fn, const TypeMemberInfo *TM)
    : Fn(Fn), TM(TM),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()),
      WasDevirt(false) {}

namespace {

// A slot in a set of virtual tables. The TypeID identifies the set of virtual
// tables, and the ByteOffset is the offset in bytes from the address point to
// the virtual function pointer.
struct VTableSlot {
  Metadata *TypeID;
  uint64_t ByteOffset;
};
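
// For example (using an Itanium ABI mangled name purely for illustration),
// a call to the second virtual function of a class A on a 64-bit target
// would typically correspond to the slot {!"_ZTS1A", 8}: the type identifier
// names the set of vtables compatible with A, and the byte offset 8 selects
// the second function pointer after the address point.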

} // end anonymous namespace

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.TypeID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.TypeID == RHS.TypeID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

} // end namespace llvm

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  // If non-null, this field points to the associated unsafe use count stored in
  // the DevirtModule::NumUnsafeUsesForTypeTest map below. See the description
  // of that field for details.
  unsigned *NumUnsafeUses;

  void
  emitRemark(const StringRef OptName, const StringRef TargetName,
             function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
    Function *F = CS.getCaller();
    DebugLoc DLoc = CS->getDebugLoc();
    BasicBlock *Block = CS.getParent();

    using namespace ore;
    OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, OptName, DLoc, Block)
                      << NV("Optimization", OptName)
                      << ": devirtualized a call to "
                      << NV("FunctionName", TargetName));
  }

  void replaceAndErase(
      const StringRef OptName, const StringRef TargetName, bool RemarksEnabled,
      function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
      Value *New) {
    if (RemarksEnabled)
      emitRemark(OptName, TargetName, OREGetter);
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
    // This use is no longer unsafe.
    if (NumUnsafeUses)
      --*NumUnsafeUses;
  }
};

// Call site information collected for a specific VTableSlot and possibly a list
// of constant integer arguments. The grouping by arguments is handled by the
// VTableSlotInfo class.
struct CallSiteInfo {
  /// The set of call sites for this slot. Used during regular LTO and the
  /// import phase of ThinLTO (as well as the export phase of ThinLTO for any
  /// call sites that appear in the merged module itself); in each of these
  /// cases we are directly operating on the call sites at the IR level.
  std::vector<VirtualCallSite> CallSites;

  /// Whether all call sites represented by this CallSiteInfo, including those
  /// in summaries, have been devirtualized. This starts off as true because a
  /// default constructed CallSiteInfo represents no call sites.
  bool AllCallSitesDevirted = true;

  // These fields are used during the export phase of ThinLTO and reflect
  // information collected from function summaries.

  /// Whether any function summary contains an llvm.assume(llvm.type.test) for
  /// this slot.
  bool SummaryHasTypeTestAssumeUsers = false;

  /// CFI-specific: a vector containing the list of function summaries that use
  /// the llvm.type.checked.load intrinsic and therefore will require
  /// resolutions for llvm.type.test in order to implement CFI checks if
  /// devirtualization was unsuccessful. If devirtualization was successful, the
  /// pass will clear this vector by calling markDevirt(). If at the end of the
  /// pass the vector is non-empty, we will need to add a use of llvm.type.test
  /// to each of the function summaries in the vector.
  std::vector<FunctionSummary *> SummaryTypeCheckedLoadUsers;

  bool isExported() const {
    return SummaryHasTypeTestAssumeUsers ||
           !SummaryTypeCheckedLoadUsers.empty();
  }

  void markSummaryHasTypeTestAssumeUsers() {
    SummaryHasTypeTestAssumeUsers = true;
    AllCallSitesDevirted = false;
  }

  void addSummaryTypeCheckedLoadUser(FunctionSummary *FS) {
    SummaryTypeCheckedLoadUsers.push_back(FS);
    AllCallSitesDevirted = false;
  }

  void markDevirt() {
    AllCallSitesDevirted = true;

    // As explained in the comment for SummaryTypeCheckedLoadUsers.
    SummaryTypeCheckedLoadUsers.clear();
  }
};

// Call site information collected for a specific VTableSlot.
struct VTableSlotInfo {
  // The set of call sites which do not have all constant integer arguments
  // (excluding "this").
  CallSiteInfo CSInfo;

  // The set of call sites with all constant integer arguments (excluding
  // "this"), grouped by argument list.
  std::map<std::vector<uint64_t>, CallSiteInfo> ConstCSInfo;

  void addCallSite(Value *VTable, CallSite CS, unsigned *NumUnsafeUses);

private:
  CallSiteInfo &findCallSiteInfo(CallSite CS);
};

CallSiteInfo &VTableSlotInfo::findCallSiteInfo(CallSite CS) {
  std::vector<uint64_t> Args;
  auto *CI = dyn_cast<IntegerType>(CS.getType());
  if (!CI || CI->getBitWidth() > 64 || CS.arg_empty())
    return CSInfo;
  for (auto &&Arg : make_range(CS.arg_begin() + 1, CS.arg_end())) {
    auto *CI = dyn_cast<ConstantInt>(Arg);
    if (!CI || CI->getBitWidth() > 64)
      return CSInfo;
    Args.push_back(CI->getZExtValue());
  }
  return ConstCSInfo[Args];
}
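
// For example (hypothetical call sites): "p->f(1, 2)" with constant integer
// arguments 1 and 2 is grouped under ConstCSInfo[{1, 2}], whereas "p->f(1, x)"
// with a non-constant x falls back to CSInfo, as does any call whose return
// type is not an integer of at most 64 bits, since the constant propagation
// optimizations below only apply to such returns.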

void VTableSlotInfo::addCallSite(Value *VTable, CallSite CS,
                                 unsigned *NumUnsafeUses) {
  auto &CSI = findCallSiteInfo(CS);
  CSI.AllCallSitesDevirted = false;
  CSI.CallSites.push_back({VTable, CS, NumUnsafeUses});
}

struct DevirtModule {
  Module &M;
  function_ref<AAResults &(Function &)> AARGetter;

  ModuleSummaryIndex *ExportSummary;
  const ModuleSummaryIndex *ImportSummary;

  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;
  IntegerType *Int64Ty;
  IntegerType *IntPtrTy;

  bool RemarksEnabled;
  function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter;

  MapVector<VTableSlot, VTableSlotInfo> CallSlots;

  // This map keeps track of the number of "unsafe" uses of a loaded function
  // pointer. The key is the associated llvm.type.test intrinsic call generated
  // by this pass. An unsafe use is one that calls the loaded function pointer
  // directly. Every time we eliminate an unsafe use (for example, by
  // devirtualizing it or by applying virtual constant propagation), we
  // decrement the value stored in this map. If a value reaches zero, we can
  // eliminate the type check by RAUWing the associated llvm.type.test call with
  // true.
  std::map<CallInst *, unsigned> NumUnsafeUsesForTypeTest;

  DevirtModule(Module &M, function_ref<AAResults &(Function &)> AARGetter,
               function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter,
               ModuleSummaryIndex *ExportSummary,
               const ModuleSummaryIndex *ImportSummary)
      : M(M), AARGetter(AARGetter), ExportSummary(ExportSummary),
        ImportSummary(ImportSummary), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())),
        Int64Ty(Type::getInt64Ty(M.getContext())),
        IntPtrTy(M.getDataLayout().getIntPtrType(M.getContext(), 0)),
        RemarksEnabled(areRemarksEnabled()), OREGetter(OREGetter) {
    assert(!(ExportSummary && ImportSummary));
  }

  bool areRemarksEnabled();

  void scanTypeTestUsers(Function *TypeTestFunc, Function *AssumeFunc);
  void scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc);

  void buildTypeIdentifierMap(
      std::vector<VTableBits> &Bits,
      DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap);
  Constant *getPointerAtOffset(Constant *I, uint64_t Offset);
  bool
  tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                            const std::set<TypeMemberInfo> &TypeMemberInfos,
                            uint64_t ByteOffset);

  void applySingleImplDevirt(VTableSlotInfo &SlotInfo, Constant *TheFn,
                             bool &IsExported);
  bool trySingleImplDevirt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           VTableSlotInfo &SlotInfo,
                           WholeProgramDevirtResolution *Res);

  void applyICallBranchFunnel(VTableSlotInfo &SlotInfo, Constant *JT,
                              bool &IsExported);
  void tryICallBranchFunnel(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                            VTableSlotInfo &SlotInfo,
                            WholeProgramDevirtResolution *Res, VTableSlot Slot);

  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<uint64_t> Args);

  void applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                             uint64_t TheRetVal);
  bool tryUniformRetValOpt(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           CallSiteInfo &CSInfo,
                           WholeProgramDevirtResolution::ByArg *Res);

  // Returns the global symbol name that is used to export information about the
  // given vtable slot and list of arguments.
  std::string getGlobalName(VTableSlot Slot, ArrayRef<uint64_t> Args,
                            StringRef Name);

  bool shouldExportConstantsAsAbsoluteSymbols();

  // This function is called during the export phase to create a symbol
  // definition containing information about the given vtable slot and list of
  // arguments.
  void exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
                    Constant *C);
  void exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args, StringRef Name,
                      uint32_t Const, uint32_t &Storage);

  // This function is called during the import phase to create a reference to
  // the symbol definition created during the export phase.
  Constant *importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
                         StringRef Name);
  Constant *importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
                           StringRef Name, IntegerType *IntTy,
                           uint32_t Storage);

  Constant *getMemberAddr(const TypeMemberInfo *M);

  void applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName, bool IsOne,
                            Constant *UniqueMemberAddr);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                          CallSiteInfo &CSInfo,
                          WholeProgramDevirtResolution::ByArg *Res,
                          VTableSlot Slot, ArrayRef<uint64_t> Args);

  void applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
                             Constant *Byte, Constant *Bit);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           VTableSlotInfo &SlotInfo,
                           WholeProgramDevirtResolution *Res, VTableSlot Slot);

  void rebuildGlobal(VTableBits &B);

  // Apply the summary resolution for Slot to all virtual calls in SlotInfo.
  void importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo);

  // If we were able to eliminate all unsafe uses for a type checked load,
  // eliminate the associated type tests by replacing them with true.
  void removeRedundantTypeTests();

  bool run();

  // Lower the module using the action and summary passed as command line
  // arguments. For testing purposes only.
  static bool runForTesting(
      Module &M, function_ref<AAResults &(Function &)> AARGetter,
      function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter);
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;

  bool UseCommandLine = false;

  ModuleSummaryIndex *ExportSummary;
  const ModuleSummaryIndex *ImportSummary;

  WholeProgramDevirt() : ModulePass(ID), UseCommandLine(true) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  WholeProgramDevirt(ModuleSummaryIndex *ExportSummary,
                     const ModuleSummaryIndex *ImportSummary)
      : ModulePass(ID), ExportSummary(ExportSummary),
        ImportSummary(ImportSummary) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    // In the new pass manager, we can request the optimization
    // remark emitter pass on a per-function-basis, which the
    // OREGetter will do for us.
    // In the old pass manager, this is harder, so we just build
    // an optimization remark emitter on the fly, when we need it.
    std::unique_ptr<OptimizationRemarkEmitter> ORE;
    auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
      ORE = llvm::make_unique<OptimizationRemarkEmitter>(F);
      return *ORE;
    };

    if (UseCommandLine)
      return DevirtModule::runForTesting(M, LegacyAARGetter(*this), OREGetter);

    return DevirtModule(M, LegacyAARGetter(*this), OREGetter, ExportSummary,
                        ImportSummary)
        .run();
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
  }
};

} // end anonymous namespace

INITIALIZE_PASS_BEGIN(WholeProgramDevirt, "wholeprogramdevirt",
                      "Whole program devirtualization", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(WholeProgramDevirt, "wholeprogramdevirt",
                    "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *
llvm::createWholeProgramDevirtPass(ModuleSummaryIndex *ExportSummary,
                                   const ModuleSummaryIndex *ImportSummary) {
  return new WholeProgramDevirt(ExportSummary, ImportSummary);
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto AARGetter = [&](Function &F) -> AAResults & {
    return FAM.getResult<AAManager>(F);
  };
  auto OREGetter = [&](Function *F) -> OptimizationRemarkEmitter & {
    return FAM.getResult<OptimizationRemarkEmitterAnalysis>(*F);
  };
  if (!DevirtModule(M, AARGetter, OREGetter, nullptr, nullptr).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}

bool DevirtModule::runForTesting(
    Module &M, function_ref<AAResults &(Function &)> AARGetter,
    function_ref<OptimizationRemarkEmitter &(Function *)> OREGetter) {
  ModuleSummaryIndex Summary(/*IsPerformingAnalysis=*/false);

  // Handle the command-line summary arguments. This code is for testing
  // purposes only, so we handle errors directly.
  if (!ClReadSummary.empty()) {
    ExitOnError ExitOnErr("-wholeprogramdevirt-read-summary: " + ClReadSummary +
                          ": ");
    auto ReadSummaryFile =
        ExitOnErr(errorOrToExpected(MemoryBuffer::getFile(ClReadSummary)));

    yaml::Input In(ReadSummaryFile->getBuffer());
    In >> Summary;
    ExitOnErr(errorCodeToError(In.error()));
  }

  bool Changed =
      DevirtModule(
          M, AARGetter, OREGetter,
          ClSummaryAction == PassSummaryAction::Export ? &Summary : nullptr,
          ClSummaryAction == PassSummaryAction::Import ? &Summary : nullptr)
          .run();

  if (!ClWriteSummary.empty()) {
    ExitOnError ExitOnErr(
        "-wholeprogramdevirt-write-summary: " + ClWriteSummary + ": ");
    std::error_code EC;
    raw_fd_ostream OS(ClWriteSummary, EC, sys::fs::F_Text);
    ExitOnErr(errorCodeToError(EC));

    yaml::Output Out(OS);
    Out << Summary;
  }

  return Changed;
}

void DevirtModule::buildTypeIdentifierMap(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<TypeMemberInfo>> &TypeIdMap) {
  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(M.getGlobalList().size());
  SmallVector<MDNode *, 2> Types;
  for (GlobalVariable &GV : M.globals()) {
    Types.clear();
    GV.getMetadata(LLVMContext::MD_type, Types);
    if (Types.empty())
      continue;

    VTableBits *&BitsPtr = GVToBits[&GV];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = &GV;
      Bits.back().ObjectSize =
          M.getDataLayout().getTypeAllocSize(GV.getInitializer()->getType());
      BitsPtr = &Bits.back();
    }

    for (MDNode *Type : Types) {
      auto TypeID = Type->getOperand(1).get();

      uint64_t Offset =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(Type->getOperand(0))->getValue())
              ->getZExtValue();

      TypeIdMap[TypeID].insert({BitsPtr, Offset});
    }
  }
}

Constant *DevirtModule::getPointerAtOffset(Constant *I, uint64_t Offset) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op));
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize);
  }
  return nullptr;
}
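
// For example (an illustrative layout): given a vtable initializer of type
// { [3 x i8*] } on a target with 8-byte pointers, getPointerAtOffset(Init, 16)
// recurses through the struct into the array and returns its third element,
// i.e. the function pointer stored 16 bytes from the start of the constant.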

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<TypeMemberInfo> &TypeMemberInfos, uint64_t ByteOffset) {
  for (const TypeMemberInfo &TM : TypeMemberInfos) {
    if (!TM.Bits->GV->isConstant())
      return false;

    Constant *Ptr = getPointerAtOffset(TM.Bits->GV->getInitializer(),
                                       TM.Offset + ByteOffset);
    if (!Ptr)
      return false;

    auto Fn = dyn_cast<Function>(Ptr->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &TM});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

void DevirtModule::applySingleImplDevirt(VTableSlotInfo &SlotInfo,
                                         Constant *TheFn, bool &IsExported) {
  auto Apply = [&](CallSiteInfo &CSInfo) {
    for (auto &&VCallSite : CSInfo.CallSites) {
      if (RemarksEnabled)
        VCallSite.emitRemark("single-impl", TheFn->getName(), OREGetter);
      VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
          TheFn, VCallSite.CS.getCalledValue()->getType()));
      // This use is no longer unsafe.
      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;
    }
    if (CSInfo.isExported())
      IsExported = true;
    CSInfo.markDevirt();
  };
  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
    Apply(P.second);
}

bool DevirtModule::trySingleImplDevirt(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    VTableSlotInfo &SlotInfo, WholeProgramDevirtResolution *Res) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  if (RemarksEnabled)
    TargetsForSlot[0].WasDevirt = true;

  bool IsExported = false;
  applySingleImplDevirt(SlotInfo, TheFn, IsExported);
  if (!IsExported)
    return false;

  // If the only implementation has local linkage, we must promote to external
  // to make it visible to thin LTO objects. We can only get here during the
  // ThinLTO export phase.
  if (TheFn->hasLocalLinkage()) {
    std::string NewName = (TheFn->getName() + "$merged").str();

    // Since we are renaming the function, any comdats with the same name must
    // also be renamed. This is required when targeting COFF, as the comdat name
    // must match one of the names of the symbols in the comdat.
    if (Comdat *C = TheFn->getComdat()) {
      if (C->getName() == TheFn->getName()) {
        Comdat *NewC = M.getOrInsertComdat(NewName);
        NewC->setSelectionKind(C->getSelectionKind());
        for (GlobalObject &GO : M.global_objects())
          if (GO.getComdat() == C)
            GO.setComdat(NewC);
      }
    }

    TheFn->setLinkage(GlobalValue::ExternalLinkage);
    TheFn->setVisibility(GlobalValue::HiddenVisibility);
    TheFn->setName(NewName);
  }

  Res->TheKind = WholeProgramDevirtResolution::SingleImpl;
  Res->SingleImplName = TheFn->getName();

  return true;
}
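
// As an illustrative IR sketch (value names invented), single implementation
// devirtualization turns
//
//   %fptr = load i8*, i8** %slot
//   %fptrc = bitcast i8* %fptr to i32 (%struct.A*)*
//   %r = call i32 %fptrc(%struct.A* %obj)
//
// into a direct call
//
//   %r = call i32 @_ZN1B1fEv(%struct.A* %obj)
//
// by rewriting the called operand in place; the now-dead vtable load is left
// for later passes to clean up.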

void DevirtModule::tryICallBranchFunnel(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
    WholeProgramDevirtResolution *Res, VTableSlot Slot) {
  Triple T(M.getTargetTriple());
  if (T.getArch() != Triple::x86_64)
    return;

  if (TargetsForSlot.size() > ClThreshold)
    return;

  bool HasNonDevirt = !SlotInfo.CSInfo.AllCallSitesDevirted;
  if (!HasNonDevirt)
    for (auto &P : SlotInfo.ConstCSInfo)
      if (!P.second.AllCallSitesDevirted) {
        HasNonDevirt = true;
        break;
      }

  if (!HasNonDevirt)
    return;

  FunctionType *FT =
      FunctionType::get(Type::getVoidTy(M.getContext()), {Int8PtrTy}, true);
  Function *JT;
  if (isa<MDString>(Slot.TypeID)) {
    JT = Function::Create(FT, Function::ExternalLinkage,
                          getGlobalName(Slot, {}, "branch_funnel"), &M);
    JT->setVisibility(GlobalValue::HiddenVisibility);
  } else {
    JT = Function::Create(FT, Function::InternalLinkage, "branch_funnel", &M);
  }
  JT->addAttribute(1, Attribute::Nest);

  std::vector<Value *> JTArgs;
  JTArgs.push_back(JT->arg_begin());
  for (auto &T : TargetsForSlot) {
    JTArgs.push_back(getMemberAddr(T.TM));
    JTArgs.push_back(T.Fn);
  }

  BasicBlock *BB = BasicBlock::Create(M.getContext(), "", JT, nullptr);
  Constant *Intr =
      Intrinsic::getDeclaration(&M, llvm::Intrinsic::icall_branch_funnel, {});

  auto *CI = CallInst::Create(Intr, JTArgs, "", BB);
  CI->setTailCallKind(CallInst::TCK_MustTail);
  ReturnInst::Create(M.getContext(), nullptr, BB);

  bool IsExported = false;
  applyICallBranchFunnel(SlotInfo, JT, IsExported);
  if (IsExported)
    Res->TheKind = WholeProgramDevirtResolution::BranchFunnel;
}
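
// The thunk created above amounts to (an illustrative sketch; symbol names
// invented, following the getGlobalName scheme):
//
//   define void @__typeid__ZTS1A_0_branch_funnel(i8* nest %obj, ...) {
//     musttail call void (...) @llvm.icall.branch.funnel(i8* %obj,
//         i8* @vt1, void ()* @impl1, i8* @vt2, void ()* @impl2, ...)
//     ret void
//   }
//
// which the backend lowers to a comparison tree over the vtable address
// ending in direct branches to the implementations, avoiding the indirect
// call that retpolines make expensive.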

void DevirtModule::applyICallBranchFunnel(VTableSlotInfo &SlotInfo,
                                          Constant *JT, bool &IsExported) {
  auto Apply = [&](CallSiteInfo &CSInfo) {
    if (CSInfo.isExported())
      IsExported = true;
    if (CSInfo.AllCallSitesDevirted)
      return;
    for (auto &&VCallSite : CSInfo.CallSites) {
      CallSite CS = VCallSite.CS;

      // Jump tables are only profitable if the retpoline mitigation is enabled.
      Attribute FSAttr = CS.getCaller()->getFnAttribute("target-features");
      if (FSAttr.hasAttribute(Attribute::None) ||
          !FSAttr.getValueAsString().contains("+retpoline"))
        continue;

      if (RemarksEnabled)
        VCallSite.emitRemark("branch-funnel", JT->getName(), OREGetter);

      // Pass the address of the vtable in the nest register, which is r10 on
      // x86_64.
      std::vector<Type *> NewArgs;
      NewArgs.push_back(Int8PtrTy);
      for (Type *T : CS.getFunctionType()->params())
        NewArgs.push_back(T);
      PointerType *NewFT = PointerType::getUnqual(
          FunctionType::get(CS.getFunctionType()->getReturnType(), NewArgs,
                            CS.getFunctionType()->isVarArg()));

      IRBuilder<> IRB(CS.getInstruction());
      std::vector<Value *> Args;
      Args.push_back(IRB.CreateBitCast(VCallSite.VTable, Int8PtrTy));
      for (unsigned I = 0; I != CS.getNumArgOperands(); ++I)
        Args.push_back(CS.getArgOperand(I));

      CallSite NewCS;
      if (CS.isCall())
        NewCS = IRB.CreateCall(IRB.CreateBitCast(JT, NewFT), Args);
      else
        NewCS = IRB.CreateInvoke(
            IRB.CreateBitCast(JT, NewFT),
            cast<InvokeInst>(CS.getInstruction())->getNormalDest(),
            cast<InvokeInst>(CS.getInstruction())->getUnwindDest(), Args);
      NewCS.setCallingConv(CS.getCallingConv());

      AttributeList Attrs = CS.getAttributes();
      std::vector<AttributeSet> NewArgAttrs;
      NewArgAttrs.push_back(AttributeSet::get(
          M.getContext(), ArrayRef<Attribute>{Attribute::get(
                              M.getContext(), Attribute::Nest)}));
      for (unsigned I = 0; I + 2 < Attrs.getNumAttrSets(); ++I)
        NewArgAttrs.push_back(Attrs.getParamAttributes(I));
      NewCS.setAttributes(
          AttributeList::get(M.getContext(), Attrs.getFnAttributes(),
                             Attrs.getRetAttributes(), NewArgAttrs));

      CS->replaceAllUsesWith(NewCS.getInstruction());
      CS->eraseFromParent();

      // This use is no longer unsafe.
      if (VCallSite.NumUnsafeUses)
        --*VCallSite.NumUnsafeUses;
    }
    // Don't mark as devirtualized because there may be callers compiled without
    // retpoline mitigation, which would mean that they are lowered to
    // llvm.type.test and therefore require an llvm.type.test resolution for the
    // type identifier.
  };
  Apply(SlotInfo.CSInfo);
  for (auto &P : SlotInfo.ConstCSInfo)
    Apply(P.second);
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<uint64_t> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    for (unsigned I = 0; I != Args.size(); ++I) {
      auto *ArgTy = dyn_cast<IntegerType>(
          Target.Fn->getFunctionType()->getParamType(I + 1));
      if (!ArgTy)
        return false;
      EvalArgs.push_back(ConstantInt::get(ArgTy, Args[I]));
    }

    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

void DevirtModule::applyUniformRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                                         uint64_t TheRetVal) {
  for (auto Call : CSInfo.CallSites)
    Call.replaceAndErase(
        "uniform-ret-val", FnName, RemarksEnabled, OREGetter,
        ConstantInt::get(cast<IntegerType>(Call.CS.getType()), TheRetVal));
  CSInfo.markDevirt();
}

bool DevirtModule::tryUniformRetValOpt(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, CallSiteInfo &CSInfo,
    WholeProgramDevirtResolution::ByArg *Res) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  if (CSInfo.isExported()) {
    Res->TheKind = WholeProgramDevirtResolution::ByArg::UniformRetVal;
    Res->Info = TheRetVal;
  }

  applyUniformRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), TheRetVal);
  if (RemarksEnabled)
    for (auto &&Target : TargetsForSlot)
      Target.WasDevirt = true;
  return true;
}
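
// For example (illustrative): if every implementation in the hierarchy
// returns the constant 42 for the argument list in question, each call site
// in CSInfo is replaced outright by "i32 42" and erased.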

std::string DevirtModule::getGlobalName(VTableSlot Slot,
                                        ArrayRef<uint64_t> Args,
                                        StringRef Name) {
  std::string FullName = "__typeid_";
  raw_string_ostream OS(FullName);
  OS << cast<MDString>(Slot.TypeID)->getString() << '_' << Slot.ByteOffset;
  for (uint64_t Arg : Args)
    OS << '_' << Arg;
  OS << '_' << Name;
  return OS.str();
}

bool DevirtModule::shouldExportConstantsAsAbsoluteSymbols() {
  Triple T(M.getTargetTriple());
  return (T.getArch() == Triple::x86 || T.getArch() == Triple::x86_64) &&
         T.getObjectFormat() == Triple::ELF;
}

void DevirtModule::exportGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
                                StringRef Name, Constant *C) {
  GlobalAlias *GA = GlobalAlias::create(Int8Ty, 0, GlobalValue::ExternalLinkage,
                                        getGlobalName(Slot, Args, Name), C, &M);
  GA->setVisibility(GlobalValue::HiddenVisibility);
}

void DevirtModule::exportConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
                                  StringRef Name, uint32_t Const,
                                  uint32_t &Storage) {
  if (shouldExportConstantsAsAbsoluteSymbols()) {
    exportGlobal(
        Slot, Args, Name,
        ConstantExpr::getIntToPtr(ConstantInt::get(Int32Ty, Const), Int8PtrTy));
    return;
  }

  Storage = Const;
}

Constant *DevirtModule::importGlobal(VTableSlot Slot, ArrayRef<uint64_t> Args,
                                     StringRef Name) {
  Constant *C = M.getOrInsertGlobal(getGlobalName(Slot, Args, Name), Int8Ty);
  auto *GV = dyn_cast<GlobalVariable>(C);
  if (GV)
    GV->setVisibility(GlobalValue::HiddenVisibility);
  return C;
}

Constant *DevirtModule::importConstant(VTableSlot Slot, ArrayRef<uint64_t> Args,
                                       StringRef Name, IntegerType *IntTy,
                                       uint32_t Storage) {
  if (!shouldExportConstantsAsAbsoluteSymbols())
    return ConstantInt::get(IntTy, Storage);

  Constant *C = importGlobal(Slot, Args, Name);
  auto *GV = cast<GlobalVariable>(C->stripPointerCasts());
  C = ConstantExpr::getPtrToInt(C, IntTy);

  // We only need to set metadata if the global is newly created, in which
  // case it would not have hidden visibility.
  if (GV->getMetadata(LLVMContext::MD_absolute_symbol))
    return C;

  auto SetAbsRange = [&](uint64_t Min, uint64_t Max) {
    auto *MinC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Min));
    auto *MaxC = ConstantAsMetadata::get(ConstantInt::get(IntPtrTy, Max));
    GV->setMetadata(LLVMContext::MD_absolute_symbol,
                    MDNode::get(M.getContext(), {MinC, MaxC}));
  };
  unsigned AbsWidth = IntTy->getBitWidth();
  if (AbsWidth == IntPtrTy->getBitWidth())
    SetAbsRange(~0ull, ~0ull); // Full set.
  else
    SetAbsRange(0, 1ull << AbsWidth);
  return C;
}
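
// As an illustrative sketch of the export/import handshake for constants: on
// x86 ELF targets the export phase may emit an absolute symbol such as
//
//   @__typeid__ZTS1A_0_byte = hidden alias i8, inttoptr (i32 -2 to i8*)
//
// and the import phase then rebuilds the constant as
// "ptrtoint (i8* @__typeid__ZTS1A_0_byte to i32)", tagging the global with
// !absolute_symbol metadata so the backend can fold the reference to an
// immediate; on other targets the constant simply travels through the
// summary via the Storage field.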

void DevirtModule::applyUniqueRetValOpt(CallSiteInfo &CSInfo, StringRef FnName,
                                        bool IsOne,
                                        Constant *UniqueMemberAddr) {
  for (auto &&Call : CSInfo.CallSites) {
    IRBuilder<> B(Call.CS.getInstruction());
    Value *Cmp =
        B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                     B.CreateBitCast(Call.VTable, Int8PtrTy), UniqueMemberAddr);
    Cmp = B.CreateZExt(Cmp, Call.CS->getType());
    Call.replaceAndErase("unique-ret-val", FnName, RemarksEnabled, OREGetter,
                         Cmp);
  }
  CSInfo.markDevirt();
}

Constant *DevirtModule::getMemberAddr(const TypeMemberInfo *M) {
  Constant *C = ConstantExpr::getBitCast(M->Bits->GV, Int8PtrTy);
  return ConstantExpr::getGetElementPtr(Int8Ty, C,
                                        ConstantInt::get(Int64Ty, M->Offset));
}

bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    CallSiteInfo &CSInfo, WholeProgramDevirtResolution::ByArg *Res,
    VTableSlot Slot, ArrayRef<uint64_t> Args) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const TypeMemberInfo *UniqueMember = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueMember)
          return false;
        UniqueMember = Target.TM;
      }
    }

    // We should have found a unique member or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueMember);

    Constant *UniqueMemberAddr = getMemberAddr(UniqueMember);
    if (CSInfo.isExported()) {
      Res->TheKind = WholeProgramDevirtResolution::ByArg::UniqueRetVal;
      Res->Info = IsOne;

      exportGlobal(Slot, Args, "unique_member", UniqueMemberAddr);
    }

    // Replace each call with the comparison.
    applyUniqueRetValOpt(CSInfo, TargetsForSlot[0].Fn->getName(), IsOne,
                         UniqueMemberAddr);

    // Update devirtualization statistics for targets.
    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}
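
// As an illustrative IR sketch (names and the address-point offset invented),
// if C is the unique class whose implementation returns 0, each 1-bit virtual
// call is rewritten to
//
//   %r = icmp ne i8* %vtable, getelementptr (i8, i8* @_ZTV1C, i64 16)
//
// so the call yields 1 exactly when the object is not a C.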

void DevirtModule::applyVirtualConstProp(CallSiteInfo &CSInfo, StringRef FnName,
                                         Constant *Byte, Constant *Bit) {
  for (auto Call : CSInfo.CallSites) {
    auto *RetType = cast<IntegerType>(Call.CS.getType());
    IRBuilder<> B(Call.CS.getInstruction());
    Value *Addr =
        B.CreateGEP(Int8Ty, B.CreateBitCast(Call.VTable, Int8PtrTy), Byte);
    if (RetType->getBitWidth() == 1) {
      Value *Bits = B.CreateLoad(Addr);
      Value *BitsAndBit = B.CreateAnd(Bits, Bit);
      auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
      Call.replaceAndErase("virtual-const-prop-1-bit", FnName, RemarksEnabled,
                           OREGetter, IsBitSet);
    } else {
      Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
      Value *Val = B.CreateLoad(RetType, ValAddr);
      Call.replaceAndErase("virtual-const-prop", FnName, RemarksEnabled,
                           OREGetter, Val);
    }
  }
  CSInfo.markDevirt();
}
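
// As an illustrative IR sketch, a devirtualized call with an i32 return
// becomes a load at a constant offset from the vtable pointer:
//
//   %gep = getelementptr i8, i8* %vtable, i32 <OffsetByte>
//   %ptr = bitcast i8* %gep to i32*
//   %r = load i32, i32* %ptr
//
// while an i1 return loads one byte and tests a single bit:
//
//   %b = load i8, i8* %gep
//   %and = and i8 %b, <1 << OffsetBit>
//   %r = icmp ne i8 %and, 0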

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot, VTableSlotInfo &SlotInfo,
    WholeProgramDevirtResolution *Res, VTableSlot Slot) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function is defined, does not access memory, takes at
  // least one argument, does not use its first argument (which we assume is
  // 'this'), and has the same return type.
  //
  // Note that we test whether this copy of the function is readnone, rather
  // than testing function attributes, which must hold for any copy of the
  // function, even a less optimized version substituted at link time. This is
  // sound because the virtual constant propagation optimizations effectively
  // inline all implementations of the virtual function into each call site,
  // rather than using function attributes to perform local optimization.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->isDeclaration() ||
        computeFunctionBodyMemoryAccess(*Target.Fn, AARGetter(*Target.Fn)) !=
            MAK_ReadNone ||
        Target.Fn->arg_empty() || !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  for (auto &&CSByConstantArg : SlotInfo.ConstCSInfo) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    WholeProgramDevirtResolution::ByArg *ResByArg = nullptr;
    if (Res)
      ResByArg = &Res->ResByArg[CSByConstantArg.first];

    if (tryUniformRetValOpt(TargetsForSlot, CSByConstantArg.second, ResByArg))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second,
                           ResByArg, Slot, CSByConstantArg.first))
      continue;

    // Find an allocation offset in bits in all vtables associated with the
    // type.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    if (RemarksEnabled)
      for (auto &&Target : TargetsForSlot)
        Target.WasDevirt = true;

    if (CSByConstantArg.second.isExported()) {
      ResByArg->TheKind = WholeProgramDevirtResolution::ByArg::VirtualConstProp;
      exportConstant(Slot, CSByConstantArg.first, "byte", OffsetByte,
                     ResByArg->Byte);
      exportConstant(Slot, CSByConstantArg.first, "bit", 1ULL << OffsetBit,
                     ResByArg->Bit);
    }

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    Constant *ByteConst = ConstantInt::get(Int32Ty, OffsetByte);
    Constant *BitConst = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
    applyVirtualConstProp(CSByConstantArg.second,
                          TargetsForSlot[0].Fn->getName(), ByteConst, BitConst);
  }
  return true;
}

void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Copy the original vtable's metadata to the anonymous global, adjusting
  // offsets as required.
  NewGV->copyMetadata(B.GV, B.Before.Bytes.size());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}
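
// After rebuildGlobal, a vtable @_ZTV1A with pre- and post-padding is laid
// out roughly as follows (an illustrative sketch):
//
//   @anon = private constant { [N x i8], <orig type>, [M x i8] }
//       { <before bytes>, <original initializer>, <after bytes> }
//   @_ZTV1A = alias getelementptr({...}, @anon, 0, 1)
//
// so existing references still see the original vtable layout, while the
// evaluated return values live in the byte arrays on either side.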

bool DevirtModule::areRemarksEnabled() {
  const auto &FL = M.getFunctionList();
  if (FL.empty())
    return false;
  const Function &Fn = FL.front();

  const auto &BBL = Fn.getBasicBlockList();
  if (BBL.empty())
    return false;
  auto DI = OptimizationRemark(DEBUG_TYPE, "", DebugLoc(), &BBL.front());
  return DI.isEnabled();
}

void DevirtModule::scanTypeTestUsers(Function *TypeTestFunc,
                                     Function *AssumeFunc) {
  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.type.test(%p, %md)). This indicates that %p
  // points to a member of the type identifier %md. Group calls by (type ID,
  // offset) pair (effectively the identity of the virtual function) and store
  // to CallSlots.
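  //
  // The scanned pattern looks like this (illustrative):
  //
  //   %vtable = load i8*, i8** %vtableptr
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")
  //   call void @llvm.assume(i1 %p)
  //   ... indirect call through a pointer loaded from %vtable ...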
1343   DenseSet<Value *> SeenPtrs;
1344   for (auto I = TypeTestFunc->use_begin(), E = TypeTestFunc->use_end();
1345        I != E;) {
1346     auto CI = dyn_cast<CallInst>(I->getUser());
1347     ++I;
1348     if (!CI)
1349       continue;
1350 
1351     // Search for virtual calls based on %p and add them to DevirtCalls.
1352     SmallVector<DevirtCallSite, 1> DevirtCalls;
1353     SmallVector<CallInst *, 1> Assumes;
1354     findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);
1355 
1356     // If we found any, add them to CallSlots. Only do this if we haven't seen
1357     // the vtable pointer before, as it may have been CSE'd with pointers from
1358     // other call sites, and we don't want to process call sites multiple times.
1359     if (!Assumes.empty()) {
1360       Metadata *TypeId =
1361           cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
1362       Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
1363       if (SeenPtrs.insert(Ptr).second) {
1364         for (DevirtCallSite Call : DevirtCalls) {
1365           CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS, nullptr);
1366         }
1367       }
1368     }
1369 
1370     // We no longer need the assumes or the type test.
    for (auto *Assume : Assumes)
1372       Assume->eraseFromParent();
1373     // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
1374     // may use the vtable argument later.
1375     if (CI->use_empty())
1376       CI->eraseFromParent();
1377   }
1378 }
1379 
1380 void DevirtModule::scanTypeCheckedLoadUsers(Function *TypeCheckedLoadFunc) {
1381   Function *TypeTestFunc = Intrinsic::getDeclaration(&M, Intrinsic::type_test);
1382 
1383   for (auto I = TypeCheckedLoadFunc->use_begin(),
1384             E = TypeCheckedLoadFunc->use_end();
1385        I != E;) {
    auto *CI = dyn_cast<CallInst>(I->getUser());
1387     ++I;
1388     if (!CI)
1389       continue;
1390 
1391     Value *Ptr = CI->getArgOperand(0);
1392     Value *Offset = CI->getArgOperand(1);
1393     Value *TypeIdValue = CI->getArgOperand(2);
1394     Metadata *TypeId = cast<MetadataAsValue>(TypeIdValue)->getMetadata();
1395 
1396     SmallVector<DevirtCallSite, 1> DevirtCalls;
1397     SmallVector<Instruction *, 1> LoadedPtrs;
1398     SmallVector<Instruction *, 1> Preds;
1399     bool HasNonCallUses = false;
1400     findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
1401                                                HasNonCallUses, CI);
1402 
1403     // Start by generating "pessimistic" code that explicitly loads the function
1404     // pointer from the vtable and performs the type check. If possible, we will
1405     // eliminate the load and the type check later.
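    //
    // Illustratively, a call such as
    //
    //   %pair = call { i8*, i1 } @llvm.type.checked.load(i8* %vtable,
    //                                                    i32 %offset,
    //                                                    metadata !"_ZTS1A")
    //
    // is rewritten to, roughly:
    //
    //   %gep = getelementptr i8, i8* %vtable, i32 %offset
    //   %fptr = load i8*, i8** <bitcast of %gep>
    //   %ok = call i1 @llvm.type.test(i8* %vtable, metadata !"_ZTS1A")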
1406 
1407     // If possible, only generate the load at the point where it is used.
1408     // This helps avoid unnecessary spills.
1409     IRBuilder<> LoadB(
1410         (LoadedPtrs.size() == 1 && !HasNonCallUses) ? LoadedPtrs[0] : CI);
1411     Value *GEP = LoadB.CreateGEP(Int8Ty, Ptr, Offset);
1412     Value *GEPPtr = LoadB.CreateBitCast(GEP, PointerType::getUnqual(Int8PtrTy));
1413     Value *LoadedValue = LoadB.CreateLoad(Int8PtrTy, GEPPtr);
1414 
1415     for (Instruction *LoadedPtr : LoadedPtrs) {
1416       LoadedPtr->replaceAllUsesWith(LoadedValue);
1417       LoadedPtr->eraseFromParent();
1418     }
1419 
1420     // Likewise for the type test.
1421     IRBuilder<> CallB((Preds.size() == 1 && !HasNonCallUses) ? Preds[0] : CI);
1422     CallInst *TypeTestCall = CallB.CreateCall(TypeTestFunc, {Ptr, TypeIdValue});
1423 
1424     for (Instruction *Pred : Preds) {
1425       Pred->replaceAllUsesWith(TypeTestCall);
1426       Pred->eraseFromParent();
1427     }
1428 
1429     // We have already erased any extractvalue instructions that refer to the
1430     // intrinsic call, but the intrinsic may have other non-extractvalue uses
1431     // (although this is unlikely). In that case, explicitly build a pair and
1432     // RAUW it.
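    // That is, roughly:
    //
    //   %pair.0 = insertvalue { i8*, i1 } undef, i8* %fptr, 0
    //   %pair.1 = insertvalue { i8*, i1 } %pair.0, i1 %ok, 1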
1433     if (!CI->use_empty()) {
1434       Value *Pair = UndefValue::get(CI->getType());
1435       IRBuilder<> B(CI);
1436       Pair = B.CreateInsertValue(Pair, LoadedValue, {0});
1437       Pair = B.CreateInsertValue(Pair, TypeTestCall, {1});
1438       CI->replaceAllUsesWith(Pair);
1439     }
1440 
    // The number of unsafe uses is initially the number of devirtualizable
    // call sites that use the loaded function pointer.
1442     auto &NumUnsafeUses = NumUnsafeUsesForTypeTest[TypeTestCall];
1443     NumUnsafeUses = DevirtCalls.size();
1444 
1445     // If the function pointer has a non-call user, we cannot eliminate the type
1446     // check, as one of those users may eventually call the pointer. Increment
1447     // the unsafe use count to make sure it cannot reach zero.
1448     if (HasNonCallUses)
1449       ++NumUnsafeUses;
1450     for (DevirtCallSite Call : DevirtCalls) {
1451       CallSlots[{TypeId, Call.Offset}].addCallSite(Ptr, Call.CS,
1452                                                    &NumUnsafeUses);
1453     }
1454 
1455     CI->eraseFromParent();
1456   }
1457 }
1458 
1459 void DevirtModule::importResolution(VTableSlot Slot, VTableSlotInfo &SlotInfo) {
1460   const TypeIdSummary *TidSummary =
1461       ImportSummary->getTypeIdSummary(cast<MDString>(Slot.TypeID)->getString());
1462   if (!TidSummary)
1463     return;
1464   auto ResI = TidSummary->WPDRes.find(Slot.ByteOffset);
1465   if (ResI == TidSummary->WPDRes.end())
1466     return;
1467   const WholeProgramDevirtResolution &Res = ResI->second;
1468 
1469   if (Res.TheKind == WholeProgramDevirtResolution::SingleImpl) {
1470     // The type of the function in the declaration is irrelevant because every
1471     // call site will cast it to the correct type.
1472     auto *SingleImpl = M.getOrInsertFunction(
1473         Res.SingleImplName, Type::getVoidTy(M.getContext()));
1474 
    // This is the import phase, so we should not be exporting anything.
1476     bool IsExported = false;
1477     applySingleImplDevirt(SlotInfo, SingleImpl, IsExported);
1478     assert(!IsExported);
1479   }
1480 
1481   for (auto &CSByConstantArg : SlotInfo.ConstCSInfo) {
1482     auto I = Res.ResByArg.find(CSByConstantArg.first);
1483     if (I == Res.ResByArg.end())
1484       continue;
1485     auto &ResByArg = I->second;
    // FIXME: We should figure out what to do about the "function name" argument
    // to the apply* functions, as the function names are unavailable during the
    // import phase. For now, we just pass the empty string. This does not
    // affect correctness because the function names are only used for remarks.
1490     switch (ResByArg.TheKind) {
1491     case WholeProgramDevirtResolution::ByArg::UniformRetVal:
1492       applyUniformRetValOpt(CSByConstantArg.second, "", ResByArg.Info);
1493       break;
1494     case WholeProgramDevirtResolution::ByArg::UniqueRetVal: {
1495       Constant *UniqueMemberAddr =
1496           importGlobal(Slot, CSByConstantArg.first, "unique_member");
1497       applyUniqueRetValOpt(CSByConstantArg.second, "", ResByArg.Info,
1498                            UniqueMemberAddr);
1499       break;
1500     }
1501     case WholeProgramDevirtResolution::ByArg::VirtualConstProp: {
1502       Constant *Byte = importConstant(Slot, CSByConstantArg.first, "byte",
1503                                       Int32Ty, ResByArg.Byte);
1504       Constant *Bit = importConstant(Slot, CSByConstantArg.first, "bit", Int8Ty,
1505                                      ResByArg.Bit);
1506       applyVirtualConstProp(CSByConstantArg.second, "", Byte, Bit);
1507       break;
1508     }
1509     default:
1510       break;
1511     }
1512   }
1513 
1514   if (Res.TheKind == WholeProgramDevirtResolution::BranchFunnel) {
1515     auto *JT = M.getOrInsertFunction(getGlobalName(Slot, {}, "branch_funnel"),
1516                                      Type::getVoidTy(M.getContext()));
1517     bool IsExported = false;
1518     applyICallBranchFunnel(SlotInfo, JT, IsExported);
1519     assert(!IsExported);
1520   }
1521 }
1522 
1523 void DevirtModule::removeRedundantTypeTests() {
1524   auto True = ConstantInt::getTrue(M.getContext());
1525   for (auto &&U : NumUnsafeUsesForTypeTest) {
1526     if (U.second == 0) {
1527       U.first->replaceAllUsesWith(True);
1528       U.first->eraseFromParent();
1529     }
1530   }
1531 }
1532 
1533 bool DevirtModule::run() {
1534   Function *TypeTestFunc =
1535       M.getFunction(Intrinsic::getName(Intrinsic::type_test));
1536   Function *TypeCheckedLoadFunc =
1537       M.getFunction(Intrinsic::getName(Intrinsic::type_checked_load));
1538   Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
1539 
  // Normally, if there are no users of the devirtualization intrinsics in the
1541   // module, this pass has nothing to do. But if we are exporting, we also need
1542   // to handle any users that appear only in the function summaries.
1543   if (!ExportSummary &&
1544       (!TypeTestFunc || TypeTestFunc->use_empty() || !AssumeFunc ||
1545        AssumeFunc->use_empty()) &&
1546       (!TypeCheckedLoadFunc || TypeCheckedLoadFunc->use_empty()))
1547     return false;
1548 
1549   if (TypeTestFunc && AssumeFunc)
1550     scanTypeTestUsers(TypeTestFunc, AssumeFunc);
1551 
1552   if (TypeCheckedLoadFunc)
1553     scanTypeCheckedLoadUsers(TypeCheckedLoadFunc);
1554 
1555   if (ImportSummary) {
1556     for (auto &S : CallSlots)
1557       importResolution(S.first, S.second);
1558 
1559     removeRedundantTypeTests();
1560 
1561     // The rest of the code is only necessary when exporting or during regular
1562     // LTO, so we are done.
1563     return true;
1564   }
1565 
1566   // Rebuild type metadata into a map for easy lookup.
1567   std::vector<VTableBits> Bits;
1568   DenseMap<Metadata *, std::set<TypeMemberInfo>> TypeIdMap;
1569   buildTypeIdentifierMap(Bits, TypeIdMap);
1570   if (TypeIdMap.empty())
1571     return true;
1572 
1573   // Collect information from summary about which calls to try to devirtualize.
1574   if (ExportSummary) {
1575     DenseMap<GlobalValue::GUID, TinyPtrVector<Metadata *>> MetadataByGUID;
1576     for (auto &P : TypeIdMap) {
1577       if (auto *TypeId = dyn_cast<MDString>(P.first))
1578         MetadataByGUID[GlobalValue::getGUID(TypeId->getString())].push_back(
1579             TypeId);
1580     }
1581 
1582     for (auto &P : *ExportSummary) {
1583       for (auto &S : P.second.SummaryList) {
1584         auto *FS = dyn_cast<FunctionSummary>(S.get());
1585         if (!FS)
1586           continue;
1587         // FIXME: Only add live functions.
1588         for (FunctionSummary::VFuncId VF : FS->type_test_assume_vcalls()) {
1589           for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1590             CallSlots[{MD, VF.Offset}]
1591                 .CSInfo.markSummaryHasTypeTestAssumeUsers();
1592           }
1593         }
1594         for (FunctionSummary::VFuncId VF : FS->type_checked_load_vcalls()) {
1595           for (Metadata *MD : MetadataByGUID[VF.GUID]) {
1596             CallSlots[{MD, VF.Offset}].CSInfo.addSummaryTypeCheckedLoadUser(FS);
1597           }
1598         }
1599         for (const FunctionSummary::ConstVCall &VC :
1600              FS->type_test_assume_const_vcalls()) {
1601           for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1602             CallSlots[{MD, VC.VFunc.Offset}]
1603                 .ConstCSInfo[VC.Args]
1604                 .markSummaryHasTypeTestAssumeUsers();
1605           }
1606         }
1607         for (const FunctionSummary::ConstVCall &VC :
1608              FS->type_checked_load_const_vcalls()) {
1609           for (Metadata *MD : MetadataByGUID[VC.VFunc.GUID]) {
1610             CallSlots[{MD, VC.VFunc.Offset}]
1611                 .ConstCSInfo[VC.Args]
1612                 .addSummaryTypeCheckedLoadUser(FS);
1613           }
1614         }
1615       }
1616     }
1617   }
1618 
1619   // For each (type, offset) pair:
1620   bool DidVirtualConstProp = false;
  std::map<std::string, Function *> DevirtTargets;
1622   for (auto &S : CallSlots) {
1623     // Search each of the members of the type identifier for the virtual
1624     // function implementation at offset S.first.ByteOffset, and add to
1625     // TargetsForSlot.
1626     std::vector<VirtualCallTarget> TargetsForSlot;
1627     if (tryFindVirtualCallTargets(TargetsForSlot, TypeIdMap[S.first.TypeID],
1628                                   S.first.ByteOffset)) {
1629       WholeProgramDevirtResolution *Res = nullptr;
1630       if (ExportSummary && isa<MDString>(S.first.TypeID))
1631         Res = &ExportSummary
1632                    ->getOrInsertTypeIdSummary(
1633                        cast<MDString>(S.first.TypeID)->getString())
1634                    .WPDRes[S.first.ByteOffset];
1635 
1636       if (!trySingleImplDevirt(TargetsForSlot, S.second, Res)) {
1637         DidVirtualConstProp |=
1638             tryVirtualConstProp(TargetsForSlot, S.second, Res, S.first);
1639 
1640         tryICallBranchFunnel(TargetsForSlot, S.second, Res, S.first);
1641       }
1642 
      // Collect functions devirtualized for at least one call site, for stats.
1644       if (RemarksEnabled)
1645         for (const auto &T : TargetsForSlot)
1646           if (T.WasDevirt)
1647             DevirtTargets[T.Fn->getName()] = T.Fn;
1648     }
1649 
1650     // CFI-specific: if we are exporting and any llvm.type.checked.load
1651     // intrinsics were *not* devirtualized, we need to add the resulting
1652     // llvm.type.test intrinsics to the function summaries so that the
1653     // LowerTypeTests pass will export them.
1654     if (ExportSummary && isa<MDString>(S.first.TypeID)) {
1655       auto GUID =
1656           GlobalValue::getGUID(cast<MDString>(S.first.TypeID)->getString());
1657       for (auto FS : S.second.CSInfo.SummaryTypeCheckedLoadUsers)
1658         FS->addTypeTest(GUID);
1659       for (auto &CCS : S.second.ConstCSInfo)
1660         for (auto FS : CCS.second.SummaryTypeCheckedLoadUsers)
1661           FS->addTypeTest(GUID);
1662     }
1663   }
1664 
1665   if (RemarksEnabled) {
1666     // Generate remarks for each devirtualized function.
1667     for (const auto &DT : DevirtTargets) {
1668       Function *F = DT.second;
1669 
1670       using namespace ore;
1671       OREGetter(F).emit(OptimizationRemark(DEBUG_TYPE, "Devirtualized", F)
1672                         << "devirtualized "
1673                         << NV("FunctionName", F->getName()));
1674     }
1675   }
1676 
1677   removeRedundantTypeTests();
1678 
1679   // Rebuild each global we touched as part of virtual constant propagation to
1680   // include the before and after bytes.
1681   if (DidVirtualConstProp)
1682     for (VTableBits &B : Bits)
1683       rebuildGlobal(B);
1684 
1685   return true;
1686 }
1687