1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DerivedTypes.h"
30 #include "llvm/IR/Dominators.h"
31 #include "llvm/IR/Function.h"
32 #include "llvm/IR/GlobalValue.h"
33 #include "llvm/IR/GlobalVariable.h"
34 #include "llvm/IR/IRBuilder.h"
35 #include "llvm/IR/Instruction.h"
36 #include "llvm/IR/Instructions.h"
37 #include "llvm/IR/IntrinsicInst.h"
38 #include "llvm/IR/Module.h"
39 #include "llvm/IR/Type.h"
40 #include "llvm/InitializePasses.h"
41 #include "llvm/Pass.h"
42 #include "llvm/ProfileData/InstrProf.h"
43 #include "llvm/Support/Casting.h"
44 #include "llvm/Support/CommandLine.h"
45 #include "llvm/Support/Error.h"
46 #include "llvm/Support/ErrorHandling.h"
47 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
48 #include "llvm/Transforms/Utils/ModuleUtils.h"
49 #include "llvm/Transforms/Utils/SSAUpdater.h"
50 #include <algorithm>
51 #include <cassert>
52 #include <cstddef>
53 #include <cstdint>
54 #include <string>
55 
56 using namespace llvm;
57 
58 #define DEBUG_TYPE "instrprof"
59 
60 namespace {
61 
// When enabled, the counter variable of a comdat function gets a name suffix
// derived from its CFG hash (see getVarName).
cl::opt<bool> DoHashBasedCounterSplit(
    "hash-based-counter-split",
    cl::desc("Rename counter variable of a comdat function based on cfg hash"),
    cl::init(true));

// Address counters indirectly through a runtime-provided bias (see
// lowerIncrement); also enabled by default on Fuchsia.
cl::opt<bool> RuntimeCounterRelocation(
    "runtime-counter-relocation",
    cl::desc("Enable relocating counters at runtime."),
    cl::init(false));

cl::opt<bool> ValueProfileStaticAlloc(
    "vp-static-alloc",
    cl::desc("Do static counter allocation for value profiler"),
    cl::init(true));

cl::opt<double> NumCountersPerValueSite(
    "vp-counters-per-site",
    cl::desc("The average number of profile counters allocated "
             "per value profiling site."),
    // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g.,
    // 1/30. For those sites with non-zero profile, the average number of
    // targets is usually smaller than 2.
    cl::init(1.0));

cl::opt<bool> AtomicCounterUpdateAll(
    "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
    cl::desc("Make all profile counter updates atomic (for testing only)"),
    cl::init(false));

cl::opt<bool> AtomicCounterUpdatePromoted(
    "atomic-counter-update-promoted", cl::ZeroOrMore,
    cl::desc("Do counter update using atomic fetch add "
             " for promoted counters only"),
    cl::init(false));

cl::opt<bool> AtomicFirstCounter(
    "atomic-first-counter", cl::ZeroOrMore,
    cl::desc("Use atomic fetch add for first counter in a function (usually "
             "the entry counter)"),
    cl::init(false));

// If the option is not specified, the default behavior about whether
// counter promotion is done depends on how the instrumentation lowering
// pipeline is set up, i.e., the default value of this option alone
// does not determine whether promotion will be done. Explicitly
// setting this option can override the default behavior
// (see isCounterPromotionEnabled).
cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
                                 cl::desc("Do counter register promotion"),
                                 cl::init(false));
cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
    cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number counter promotions per loop to avoid"
             " increasing register pressure too much"));

// A debug option: caps the total number of counter promotions across the
// module; -1 means unlimited.
cl::opt<int>
    MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
                       cl::desc("Max number of allowed counter promotions"));

cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
    cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             " speculative counter promotion"));

cl::opt<bool> SpeculativeCounterPromotionToLoop(
    cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             " update can be further/iteratively promoted into an acyclic "
             " region."));

cl::opt<bool> IterativeCounterPromotion(
    cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
    cl::desc("Allow counter promotion across the whole loop nest."));

cl::opt<bool> SkipRetExitBlock(
    cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
    cl::desc("Suppress counter promotion if exit blocks contain ret."));
141 
142 class InstrProfilingLegacyPass : public ModulePass {
143   InstrProfiling InstrProf;
144 
145 public:
146   static char ID;
147 
148   InstrProfilingLegacyPass() : ModulePass(ID) {}
149   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
150       : ModulePass(ID), InstrProf(Options, IsCS) {
151     initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
152   }
153 
154   StringRef getPassName() const override {
155     return "Frontend instrumentation-based coverage lowering";
156   }
157 
158   bool runOnModule(Module &M) override {
159     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
160       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
161     };
162     return InstrProf.run(M, GetTLI);
163   }
164 
165   void getAnalysisUsage(AnalysisUsage &AU) const override {
166     AU.setPreservesCFG();
167     AU.addRequired<TargetLibraryInfoWrapperPass>();
168   }
169 };
170 
171 ///
172 /// A helper class to promote one counter RMW operation in the loop
173 /// into register update.
174 ///
175 /// RWM update for the counter will be sinked out of the loop after
176 /// the transformation.
177 ///
178 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
179 public:
180   PGOCounterPromoterHelper(
181       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
182       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
183       ArrayRef<Instruction *> InsertPts,
184       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
185       LoopInfo &LI)
186       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
187         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
188     assert(isa<LoadInst>(L));
189     assert(isa<StoreInst>(S));
190     SSA.AddAvailableValue(PH, Init);
191   }
192 
193   void doExtraRewritesBeforeFinalDeletion() override {
194     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
195       BasicBlock *ExitBlock = ExitBlocks[i];
196       Instruction *InsertPos = InsertPts[i];
197       // Get LiveIn value into the ExitBlock. If there are multiple
198       // predecessors, the value is defined by a PHI node in this
199       // block.
200       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
201       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
202       Type *Ty = LiveInValue->getType();
203       IRBuilder<> Builder(InsertPos);
204       if (AtomicCounterUpdatePromoted)
205         // automic update currently can only be promoted across the current
206         // loop, not the whole loop nest.
207         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
208                                 MaybeAlign(),
209                                 AtomicOrdering::SequentiallyConsistent);
210       else {
211         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
212         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
213         auto *NewStore = Builder.CreateStore(NewVal, Addr);
214 
215         // Now update the parent loop's candidate list:
216         if (IterativeCounterPromotion) {
217           auto *TargetLoop = LI.getLoopFor(ExitBlock);
218           if (TargetLoop)
219             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
220         }
221       }
222     }
223   }
224 
225 private:
226   Instruction *Store;
227   ArrayRef<BasicBlock *> ExitBlocks;
228   ArrayRef<Instruction *> InsertPts;
229   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
230   LoopInfo &LI;
231 };
232 
/// A helper class to do register promotion for all profile counter
/// updates in a loop.
///
class PGOCounterPromoter {
public:
  /// Collect the deduplicated exit blocks of \p CurLoop and the insertion
  /// points used to flush promoted counter values, unless promotion is not
  /// possible for this loop at all (then ExitBlocks stays empty and run()
  /// bails out).
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
        LI(LI), BFI(BFI) {

    // Skip collection of ExitBlocks and InsertPts for loops that will not be
    // able to have counters promoted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;

    L.getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(&L, LoopExitBlocks))
      return;

    // The same block may terminate several exiting edges; BlockSet
    // deduplicates so each exit gets exactly one flush point.
    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  /// Promote as many of this loop's candidate counter updates as the
  /// per-loop and module-wide limits allow. \p NumPromoted is the running
  /// module-wide promotion count. Returns true if anything was promoted.
  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;

    // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of incomplete profile -- if the
    // loop is a long running loop and dump is called in the middle
    // of the loop, the result profile is incomplete.
    // FIXME: add other heuristics to detect long running loops.
    if (SkipRetExitBlock) {
      for (auto BB : ExitBlocks)
        if (isa<ReturnInst>(BB->getTerminator()))
          return false;
    }

    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      // NOTE(review): when the per-loop limit is hit we break before
      // *NumPromoted is bumped, so the last promotion in this loop is not
      // counted against the module-wide limit below.
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      // Enforce the module-wide cap (-1 means unlimited).
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }

private:
  // Whether promoting across this promoter's loop is considered
  // non-speculative (a single exiting block) or within the allowed
  // speculation bound.
  // NOTE(review): this inspects the member loop L; the LP parameter is
  // currently unused.
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Check whether the loop satisfies the basic conditions needed to perform
  // Counter Promotions.
  bool isPromotionPossible(Loop *LP,
                           const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
    // We can't insert into a catchswitch.
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return false;

    if (!LP->hasDedicatedExits())
      return false;

    // A preheader is required so the promoted register has a unique entry
    // edge to receive its initial value.
    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return false;

    return true;
  }

  // Returns the max number of Counter Promotions for LP.
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(LP, LoopExitBlocks))
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block:
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      // Remaining budget of the target loop: its own max minus the
      // candidates already pending there (the max() saturates at zero).
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks; // Deduplicated loop exits.
  SmallVector<Instruction *, 8> InsertPts; // Flush point per exit block.
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};
399 
// Selects which runtime routine getOrInsertValueProfilingCall declares.
enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
  // profiling.
  Default,

  // MemOp: the memop size value profiling.
  MemOp
};
408 
409 } // end anonymous namespace
410 
411 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
412   FunctionAnalysisManager &FAM =
413       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
414   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
415     return FAM.getResult<TargetLibraryAnalysis>(F);
416   };
417   if (!run(M, GetTLI))
418     return PreservedAnalyses::all();
419 
420   return PreservedAnalyses::none();
421 }
422 
char InstrProfilingLegacyPass::ID = 0;
// Register the legacy pass under the "instrprof" name, declaring its
// dependency on TargetLibraryInfoWrapperPass.
INITIALIZE_PASS_BEGIN(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)
431 
432 ModulePass *
433 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
434                                      bool IsCS) {
435   return new InstrProfilingLegacyPass(Options, IsCS);
436 }
437 
438 static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
439   InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
440   if (Inc)
441     return Inc;
442   return dyn_cast<InstrProfIncrementInst>(Instr);
443 }
444 
445 bool InstrProfiling::lowerIntrinsics(Function *F) {
446   bool MadeChange = false;
447   PromotionCandidates.clear();
448   for (BasicBlock &BB : *F) {
449     for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
450       InstrProfIncrementInst *Inc = castToIncrementInst(&Instr);
451       if (Inc) {
452         lowerIncrement(Inc);
453         MadeChange = true;
454       } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
455         lowerValueProfileInst(Ind);
456         MadeChange = true;
457       }
458     }
459   }
460 
461   if (!MadeChange)
462     return false;
463 
464   promoteCounterLoadStores(F);
465   return true;
466 }
467 
468 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
469   // Mach-O don't support weak external references.
470   if (TT.isOSBinFormatMachO())
471     return false;
472 
473   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
474     return RuntimeCounterRelocation;
475 
476   // Fuchsia uses runtime counter relocation by default.
477   return TT.isOSFuchsia();
478 }
479 
480 bool InstrProfiling::isCounterPromotionEnabled() const {
481   if (DoCounterPromotion.getNumOccurrences() > 0)
482     return DoCounterPromotion;
483 
484   return Options.DoCounterPromotion;
485 }
486 
// Register-promote the counter load/store pairs recorded while lowering
// increments in \p F: within each loop the counter lives in a register and
// is flushed back to memory at the loop exits (see PGOCounterPromoter).
void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  // Optionally build BFI locally so promotion decisions can be
  // profile-guided (see PGOCounterPromoter::run).
  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  // Group candidates by the innermost loop containing the counter update;
  // updates outside any loop are left as-is.
  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}
521 
522 static bool needsRuntimeHookUnconditionally(const Triple &TT) {
523   // On Fuchsia, we only need runtime hook if any counters are present.
524   if (TT.isOSFuchsia())
525     return false;
526 
527   return true;
528 }
529 
530 /// Check if the module contains uses of any profiling intrinsics.
531 static bool containsProfilingIntrinsics(Module &M) {
532   if (auto *F = M.getFunction(
533           Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
534     if (!F->use_empty())
535       return true;
536   if (auto *F = M.getFunction(
537           Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
538     if (!F->use_empty())
539       return true;
540   if (auto *F = M.getFunction(
541           Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
542     if (!F->use_empty())
543       return true;
544   return false;
545 }
546 
// Shared implementation behind both pass-manager entry points: lowers all
// profiling intrinsics in \p M and emits the runtime data structures.
// Returns true if the module was changed.
bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  // Reset per-module state so this object can be reused across modules.
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  CompilerUsedVars.clear();
  UsedVars.clear();
  TT = Triple(M.getTargetTriple());

  bool MadeChange = false;

  // Emit the runtime hook even if no counters are present.
  if (needsRuntimeHookUnconditionally(TT))
    MadeChange = emitRuntimeHook();

  // Improve compile time by avoiding linear scans when there is no work.
  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
    return MadeChange;

  // We did not know how many value sites there would be inside
  // the instrumented function. This is counting the number of instrumented
  // target value sites to enter it as field in the profile data variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          // Remember the first increment; it anchors the per-function
          // counter/data variable creation below.
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires per-function profile data
    // variable to be created first.
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  // Second pass: actually lower the intrinsics.
  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  // Emit the module-level structures the profile runtime consumes.
  emitVNodes();
  emitNameData();
  emitRuntimeHook();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}
607 
// Get (or lazily declare) the profile runtime routine that lowered value
// profiling intrinsics call into: either the generic value-profile hook or
// the memop-size variant, selected by \p CallType.
static FunctionCallee getOrInsertValueProfilingCall(
    Module &M, const TargetLibraryInfo &TLI,
    ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
  // Note: Ctx is referenced inside the VALUE_PROF_FUNC_PARAM macro
  // expansion below.
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  // Parameter 2 is the 32-bit counter index; give it the target's preferred
  // i32 extension attribute, if any.
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  assert((CallType == ValueProfilingCallType::Default ||
          CallType == ValueProfilingCallType::MemOp) &&
         "Must be Default or MemOp");
  // The parameter list is generated from the canonical description in
  // InstrProfData.inc so it stays in sync with the runtime's signature.
  Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *ValueProfilingCallTy =
      FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
  StringRef FuncName = CallType == ValueProfilingCallType::Default
                           ? getInstrProfValueProfFuncName()
                           : getInstrProfValueProfMemOpFuncName();
  return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
}
632 
633 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
634   GlobalVariable *Name = Ind->getName();
635   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
636   uint64_t Index = Ind->getIndex()->getZExtValue();
637   auto It = ProfileDataMap.find(Name);
638   if (It == ProfileDataMap.end()) {
639     PerFunctionProfileData PD;
640     PD.NumValueSites[ValueKind] = Index + 1;
641     ProfileDataMap[Name] = PD;
642   } else if (It->second.NumValueSites[ValueKind] <= Index)
643     It->second.NumValueSites[ValueKind] = Index + 1;
644 }
645 
646 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
647   GlobalVariable *Name = Ind->getName();
648   auto It = ProfileDataMap.find(Name);
649   assert(It != ProfileDataMap.end() && It->second.DataVar &&
650          "value profiling detected in function with no counter incerement");
651 
652   GlobalVariable *DataVar = It->second.DataVar;
653   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
654   uint64_t Index = Ind->getIndex()->getZExtValue();
655   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
656     Index += It->second.NumValueSites[Kind];
657 
658   IRBuilder<> Builder(Ind);
659   bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
660                       llvm::InstrProfValueKind::IPVK_MemOPSize);
661   CallInst *Call = nullptr;
662   auto *TLI = &GetTLI(*Ind->getFunction());
663 
664   // To support value profiling calls within Windows exception handlers, funclet
665   // information contained within operand bundles needs to be copied over to
666   // the library call. This is required for the IR to be processed by the
667   // WinEHPrepare pass.
668   SmallVector<OperandBundleDef, 1> OpBundles;
669   Ind->getOperandBundlesAsDefs(OpBundles);
670   if (!IsMemOpSize) {
671     Value *Args[3] = {Ind->getTargetValue(),
672                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
673                       Builder.getInt32(Index)};
674     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
675                               OpBundles);
676   } else {
677     Value *Args[3] = {Ind->getTargetValue(),
678                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
679                       Builder.getInt32(Index)};
680     Call = Builder.CreateCall(
681         getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
682         Args, OpBundles);
683   }
684   if (auto AK = TLI->getExtAttrForI32Param(false))
685     Call->addParamAttr(2, AK);
686   Ind->replaceAllUsesWith(Call);
687   Ind->eraseFromParent();
688 }
689 
// Lower one instrprof_increment[_step] intrinsic into an update of the
// function's counter array slot.
void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  GlobalVariable *Counters = getOrCreateRegionCounters(Inc);

  IRBuilder<> Builder(Inc);
  uint64_t Index = Inc->getIndex()->getZExtValue();
  // Address of the counter slot this intrinsic updates.
  Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
                                                   Counters, 0, Index);

  if (isRuntimeCounterRelocationEnabled()) {
    // With runtime relocation the effective counter address is the static
    // address plus a per-module bias global (name from
    // getInstrProfCounterBiasVarName). The bias load is materialized once at
    // the function entry and reused by every increment in the function.
    Type *Int64Ty = Type::getInt64Ty(M->getContext());
    Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
    Function *Fn = Inc->getParent()->getParent();
    Instruction &I = Fn->getEntryBlock().front();
    // Assumes that if the entry block already starts with a LoadInst, it is
    // the bias load created by an earlier lowering in this function.
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI) {
      // This inner Builder intentionally shadows the outer one: the bias
      // load is inserted at the function entry, not at the increment site.
      IRBuilder<> Builder(&I);
      GlobalVariable *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
      if (!Bias) {
        // Compiler must define this variable when runtime counter relocation
        // is being used. Runtime has a weak external reference that is used
        // to check whether that's the case or not.
        Bias = new GlobalVariable(*M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
                                  Constant::getNullValue(Int64Ty),
                                  getInstrProfCounterBiasVarName());
        Bias->setVisibility(GlobalVariable::HiddenVisibility);
        // A definition that's weak (linkonce_odr) without being in a COMDAT
        // section wouldn't lead to link errors, but it would lead to a dead
        // data word from every TU but one. Putting it in COMDAT ensures there
        // will be exactly one data slot in the link.
        if (TT.supportsCOMDAT())
          Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
      }
      LI = Builder.CreateLoad(Int64Ty, Bias);
    }
    // Effective address = static counter address + bias.
    auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
    Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
  }

  if (Options.Atomic || AtomicCounterUpdateAll ||
      (Index == 0 && AtomicFirstCounter)) {
    // Atomic update requested globally or just for the entry counter.
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            MaybeAlign(), AtomicOrdering::Monotonic);
  } else {
    // Plain load-add-store; record the pair so promoteCounterLoadStores can
    // register-promote it later.
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}
742 
743 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
744   ConstantArray *Names =
745       cast<ConstantArray>(CoverageNamesVar->getInitializer());
746   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
747     Constant *NC = Names->getOperand(I);
748     Value *V = NC->stripPointerCasts();
749     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
750     GlobalVariable *Name = cast<GlobalVariable>(V);
751 
752     Name->setLinkage(GlobalValue::PrivateLinkage);
753     ReferencedNames.push_back(Name);
754     NC->dropAllReferences();
755   }
756   CoverageNamesVar->eraseFromParent();
757 }
758 
759 /// Get the name of a profiling variable for a particular function.
760 static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix,
761                               bool &Renamed) {
762   StringRef NamePrefix = getInstrProfNameVarPrefix();
763   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
764   Function *F = Inc->getParent()->getParent();
765   Module *M = F->getParent();
766   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
767       !canRenameComdatFunc(*F)) {
768     Renamed = false;
769     return (Prefix + Name).str();
770   }
771   Renamed = true;
772   uint64_t FuncHash = Inc->getHash()->getZExtValue();
773   SmallVector<char, 24> HashPostfix;
774   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
775     return (Prefix + Name).str();
776   return (Prefix + Name + "." + Twine(FuncHash)).str();
777 }
778 
779 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
780   auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
781   if (!MD)
782     return 0;
783 
784   // If the flag is a ConstantAsMetadata, it should be an integer representable
785   // in 64-bits.
786   return cast<ConstantInt>(MD->getValue())->getZExtValue();
787 }
788 
789 static bool enablesValueProfiling(const Module &M) {
790   return isIRPGOFlagSet(&M) ||
791          getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
792 }
793 
// Conservatively returns true if data variables may be referenced by code.
// Currently this is exactly when value profiling is enabled, since that is
// what makes lowered code reference the per-function data variables.
static bool profDataReferencedByCode(const Module &M) {
  return enablesValueProfiling(M);
}
798 
// Decide whether the profile data variable for \p F should capture the
// function's address (needed to map value-profile targets back to symbols).
static inline bool shouldRecordFunctionAddr(Function *F) {
  // Only record function addresses if IR PGO is enabled or if clang value
  // profiling is enabled. Recording function addresses greatly increases object
  // file size, because it prevents the inliner from deleting functions that
  // have been inlined everywhere.
  if (!profDataReferencedByCode(*F->getParent()))
    return false;

  // Check the linkage
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // COMDAT. This avoids the profile data variable referencing internal symbols
  // in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkonce_odr linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where vtable is not available, the function won't
  // be 'address taken'. If its address is not recorded here, the profile data
  // with missing address may be picked by the linker leading to missing
  // indirect call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}
835 
836 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
837   // Don't do this for Darwin.  compiler-rt uses linker magic.
838   if (TT.isOSDarwin())
839     return false;
840   // Use linker script magic to get data/cnts/name start/end.
841   if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
842       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
843       TT.isOSWindows())
844     return false;
845 
846   return true;
847 }
848 
/// Create (or return the cached) storage backing \p Inc: the per-function
/// counter array plus the associated profile data variable and, when value
/// profiling is statically allocated, the values-pointer array. Results are
/// cached in ProfileDataMap keyed on the function's name variable; the name
/// variable itself is demoted to private linkage and queued for emitNameData.
GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  // Fast path: reuse counters created by an earlier intrinsic for the same
  // function; otherwise start from any partial per-function data we have.
  auto It = ProfileDataMap.find(NamePtr);
  PerFunctionProfileData PD;
  if (It != ProfileDataMap.end()) {
    if (It->second.RegionCounters)
      return It->second.RegionCounters;
    PD = It->second;
  }

  // Match the linkage and visibility of the name global.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();

  // Due to the limitation of binder as of 2021/09/28, the duplicate weak
  // symbols in the same csect won't be discarded. When there are duplicate weak
  // symbols, we can NOT guarantee that the relocations get resolved to the
  // intended weak symbol, so we can not ensure the correctness of the relative
  // CounterPtr, so we have to use private linkage for counter and data symbols.
  if (TT.isOSBinFormatXCOFF()) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of counters of the COMDAT function will be emitted after linking. Keep
  // in mind that this pass may run before the inliner, so we need to create a
  // new comdat group for the counters and profiling data. If we use the comdat
  // of the parent function, that will result in relocations against discarded
  // sections.
  //
  // If the data variable is referenced by code,  counters and data have to be
  // in different comdats for COFF because the Visual C++ linker will report
  // duplicate symbol errors if there are multiple external symbols with the
  // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
  //
  // For ELF, when not using COMDAT, put counters, data and values into a
  // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
  // allows -z start-stop-gc to discard the entire group when the function is
  // discarded.
  bool DataReferencedByCode = profDataReferencedByCode(*M);
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  bool Renamed;
  // Both getVarName calls set Renamed the same way; its final value is
  // consumed below when deciding whether the data variable can be private.
  std::string CntsVarName =
      getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
  std::string DataVarName =
      getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
  auto MaybeSetComdat = [&](GlobalVariable *GV) {
    bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
    if (UseComdat) {
      StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
                                ? GV->getName()
                                : CntsVarName;
      Comdat *C = M->getOrInsertComdat(GroupName);
      if (!NeedComdat)
        C->setSelectionKind(Comdat::NoDeduplicate);
      GV->setComdat(C);
    }
  };

  // One 64-bit slot per counter.
  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();
  ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);

  // Create the counters variable.
  auto *CounterPtr =
      new GlobalVariable(*M, CounterTy, false, Linkage,
                         Constant::getNullValue(CounterTy), CntsVarName);
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  CounterPtr->setAlignment(Align(8));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Allocate statically the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  uint64_t NS = 0;
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    NS += PD.NumValueSites[Kind];
  if (NS > 0 && ValueProfileStaticAlloc &&
      !needsRuntimeRegistrationOfSectionRange(TT)) {
    ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
    auto *ValuesVar = new GlobalVariable(
        *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
        getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
    ValuesVar->setVisibility(Visibility);
    ValuesVar->setSection(
        getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
    ValuesVar->setAlignment(Align(8));
    MaybeSetComdat(ValuesVar);
    ValuesPtrExpr =
        ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
  }

  // Create data variable.
  auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  // The data struct's field types come from the canonical description in
  // InstrProfData.inc so they stay in sync with the runtime.
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  // Record the function address only when needed (see
  // shouldRecordFunctionAddr); otherwise store a null pointer.
  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  // If the data variable is not referenced by code (if we don't emit
  // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
  // data variable live under linker GC, the data variable can be private. This
  // optimization applies to ELF.
  //
  // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
  // to be false.
  //
  // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
  // that other copies must have the same CFG and cannot have value profiling.
  // If no hash suffix, other profd copies may be referenced by code.
  if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
      (TT.isOSBinFormatELF() ||
       (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  // The initializer is set after creation because RelativeCounterPtr below
  // needs the Data variable's own address.
  auto *Data =
      new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
  // Reference the counter variable with a label difference (link-time
  // constant).
  auto *RelativeCounterPtr =
      ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
                           ConstantExpr::getPtrToInt(Data, IntPtrTy));

  // Field initializers (Init) also come from InstrProfData.inc and consume
  // the locals defined above (FunctionAddr, RelativeCounterPtr, ...).
  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  Data->setInitializer(ConstantStruct::get(DataTy, DataVals));

  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  // Cache the results for subsequent intrinsics of the same function.
  PD.RegionCounters = CounterPtr;
  PD.DataVar = Data;
  ProfileDataMap[NamePtr] = PD;

  // Mark the data variable as used so that it isn't stripped out.
  CompilerUsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset Name variable's linkage and visibility to private so that
  // it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return CounterPtr;
}
1018 
/// Statically allocate the module-wide array of value-profile nodes, sized
/// from the total number of value sites across all instrumented functions.
/// Only emitted when static value-profile allocation is enabled and the
/// target can find section start/end without runtime registration.
void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Sum value sites of every kind over all functions seen so far.
  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  // No value sites at all: nothing to allocate.
  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
// Heuristic for small programs with very few total value sites.
// The default value of vp-counters-per-site is chosen based on
// the observation that large apps usually have a low percentage
// of value sites that actually have any profile data, and thus
// the average number of counters per site is low. For small
// apps with very few sites, this may not be true. Bump up the
// number of counters in this case.
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  // The node struct layout comes from the canonical description in
  // InstrProfData.inc so it stays in sync with the runtime.
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  // VNodesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
  UsedVars.push_back(VNodesVar);
}
1067 
1068 void InstrProfiling::emitNameData() {
1069   std::string UncompressedData;
1070 
1071   if (ReferencedNames.empty())
1072     return;
1073 
1074   std::string CompressedNameStr;
1075   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
1076                                           DoInstrProfNameCompression)) {
1077     report_fatal_error(Twine(toString(std::move(E))), false);
1078   }
1079 
1080   auto &Ctx = M->getContext();
1081   auto *NamesVal = ConstantDataArray::getString(
1082       Ctx, StringRef(CompressedNameStr), false);
1083   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
1084                                 GlobalValue::PrivateLinkage, NamesVal,
1085                                 getInstrProfNamesVarName());
1086   NamesSize = CompressedNameStr.size();
1087   NamesVar->setSection(
1088       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
1089   // On COFF, it's important to reduce the alignment down to 1 to prevent the
1090   // linker from inserting padding before the start of the names section or
1091   // between names entries.
1092   NamesVar->setAlignment(Align(1));
1093   // NamesVar is used by runtime but not referenced via relocation by other
1094   // sections. Conservatively make it linker retained.
1095   UsedVars.push_back(NamesVar);
1096 
1097   for (auto *NamePtr : ReferencedNames)
1098     NamePtr->eraseFromParent();
1099 }
1100 
1101 void InstrProfiling::emitRegistration() {
1102   if (!needsRuntimeRegistrationOfSectionRange(TT))
1103     return;
1104 
1105   // Construct the function.
1106   auto *VoidTy = Type::getVoidTy(M->getContext());
1107   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1108   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1109   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1110   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1111                                      getInstrProfRegFuncsName(), M);
1112   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1113   if (Options.NoRedZone)
1114     RegisterF->addFnAttr(Attribute::NoRedZone);
1115 
1116   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1117   auto *RuntimeRegisterF =
1118       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1119                        getInstrProfRegFuncName(), M);
1120 
1121   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1122   for (Value *Data : CompilerUsedVars)
1123     if (!isa<Function>(Data))
1124       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1125   for (Value *Data : UsedVars)
1126     if (Data != NamesVar && !isa<Function>(Data))
1127       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1128 
1129   if (NamesVar) {
1130     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1131     auto *NamesRegisterTy =
1132         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
1133     auto *NamesRegisterF =
1134         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1135                          getInstrProfNamesRegFuncName(), M);
1136     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1137                                     IRB.getInt64(NamesSize)});
1138   }
1139 
1140   IRB.CreateRetVoid();
1141 }
1142 
1143 bool InstrProfiling::emitRuntimeHook() {
1144   // We expect the linker to be invoked with -u<hook_var> flag for Linux
1145   // in which case there is no need to emit the external variable.
1146   if (TT.isOSLinux())
1147     return false;
1148 
1149   // If the module's provided its own runtime, we don't need to do anything.
1150   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1151     return false;
1152 
1153   // Declare an external variable that will pull in the runtime initialization.
1154   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1155   auto *Var =
1156       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1157                          nullptr, getInstrProfRuntimeHookVarName());
1158 
1159   if (TT.isOSBinFormatELF()) {
1160     // Mark the user variable as used so that it isn't stripped out.
1161     CompilerUsedVars.push_back(Var);
1162   } else {
1163     // Make a function that uses it.
1164     auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1165                                   GlobalValue::LinkOnceODRLinkage,
1166                                   getInstrProfRuntimeHookVarUseFuncName(), M);
1167     User->addFnAttr(Attribute::NoInline);
1168     if (Options.NoRedZone)
1169       User->addFnAttr(Attribute::NoRedZone);
1170     User->setVisibility(GlobalValue::HiddenVisibility);
1171     if (TT.supportsCOMDAT())
1172       User->setComdat(M->getOrInsertComdat(User->getName()));
1173 
1174     IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1175     auto *Load = IRB.CreateLoad(Int32Ty, Var);
1176     IRB.CreateRet(Load);
1177 
1178     // Mark the function as used so that it isn't stripped out.
1179     CompilerUsedVars.push_back(User);
1180   }
1181   return true;
1182 }
1183 
1184 void InstrProfiling::emitUses() {
1185   // The metadata sections are parallel arrays. Optimizers (e.g.
1186   // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
1187   // we conservatively retain all unconditionally in the compiler.
1188   //
1189   // On ELF and Mach-O, the linker can guarantee the associated sections will be
1190   // retained or discarded as a unit, so llvm.compiler.used is sufficient.
1191   // Similarly on COFF, if prof data is not referenced by code we use one comdat
1192   // and ensure this GC property as well. Otherwise, we have to conservatively
1193   // make all of the sections retained by the linker.
1194   if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
1195       (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
1196     appendToCompilerUsed(*M, CompilerUsedVars);
1197   else
1198     appendToUsed(*M, CompilerUsedVars);
1199 
1200   // We do not add proper references from used metadata sections to NamesVar and
1201   // VNodesVar, so we have to be conservative and place them in llvm.used
1202   // regardless of the target,
1203   appendToUsed(*M, UsedVars);
1204 }
1205 
1206 void InstrProfiling::emitInitialization() {
1207   // Create ProfileFileName variable. Don't don't this for the
1208   // context-sensitive instrumentation lowering: This lowering is after
1209   // LTO/ThinLTO linking. Pass PGOInstrumentationGenCreateVar should
1210   // have already create the variable before LTO/ThinLTO linking.
1211   if (!IsCS)
1212     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1213   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1214   if (!RegisterF)
1215     return;
1216 
1217   // Create the initialization function.
1218   auto *VoidTy = Type::getVoidTy(M->getContext());
1219   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1220                              GlobalValue::InternalLinkage,
1221                              getInstrProfInitFuncName(), M);
1222   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1223   F->addFnAttr(Attribute::NoInline);
1224   if (Options.NoRedZone)
1225     F->addFnAttr(Attribute::NoRedZone);
1226 
1227   // Add the basic block and the necessary calls.
1228   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1229   IRB.CreateCall(RegisterF, {});
1230   IRB.CreateRetVoid();
1231 
1232   appendToGlobalCtors(*M, F, 0);
1233 }
1234