1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DIBuilder.h"
30 #include "llvm/IR/DerivedTypes.h"
31 #include "llvm/IR/DiagnosticInfo.h"
32 #include "llvm/IR/Dominators.h"
33 #include "llvm/IR/Function.h"
34 #include "llvm/IR/GlobalValue.h"
35 #include "llvm/IR/GlobalVariable.h"
36 #include "llvm/IR/IRBuilder.h"
37 #include "llvm/IR/Instruction.h"
38 #include "llvm/IR/Instructions.h"
39 #include "llvm/IR/IntrinsicInst.h"
40 #include "llvm/IR/Module.h"
41 #include "llvm/IR/Type.h"
42 #include "llvm/InitializePasses.h"
43 #include "llvm/Pass.h"
44 #include "llvm/ProfileData/InstrProf.h"
45 #include "llvm/ProfileData/InstrProfCorrelator.h"
46 #include "llvm/Support/Casting.h"
47 #include "llvm/Support/CommandLine.h"
48 #include "llvm/Support/Error.h"
49 #include "llvm/Support/ErrorHandling.h"
50 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
51 #include "llvm/Transforms/Utils/ModuleUtils.h"
52 #include "llvm/Transforms/Utils/SSAUpdater.h"
53 #include <algorithm>
54 #include <cassert>
55 #include <cstddef>
56 #include <cstdint>
57 #include <string>
58 
59 using namespace llvm;
60 
61 #define DEBUG_TYPE "instrprof"
62 
63 namespace llvm {
64 cl::opt<bool>
65     DebugInfoCorrelate("debug-info-correlate", cl::ZeroOrMore,
66                        cl::desc("Use debug info to correlate profiles."),
67                        cl::init(false));
68 } // namespace llvm
69 
70 namespace {
71 
72 cl::opt<bool> DoHashBasedCounterSplit(
73     "hash-based-counter-split",
74     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
75     cl::init(true));
76 
77 cl::opt<bool>
78     RuntimeCounterRelocation("runtime-counter-relocation",
79                              cl::desc("Enable relocating counters at runtime."),
80                              cl::init(false));
81 
82 cl::opt<bool> ValueProfileStaticAlloc(
83     "vp-static-alloc",
84     cl::desc("Do static counter allocation for value profiler"),
85     cl::init(true));
86 
87 cl::opt<double> NumCountersPerValueSite(
88     "vp-counters-per-site",
89     cl::desc("The average number of profile counters allocated "
90              "per value profiling site."),
    // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g., 1/30.
    // For those sites with a non-zero profile, the average number of targets
    // is usually smaller than 2.
95     cl::init(1.0));
96 
97 cl::opt<bool> AtomicCounterUpdateAll(
98     "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
99     cl::desc("Make all profile counter updates atomic (for testing only)"),
100     cl::init(false));
101 
102 cl::opt<bool> AtomicCounterUpdatePromoted(
103     "atomic-counter-update-promoted", cl::ZeroOrMore,
    cl::desc("Do counter update using atomic fetch add "
             "for promoted counters only"),
106     cl::init(false));
107 
108 cl::opt<bool> AtomicFirstCounter(
109     "atomic-first-counter", cl::ZeroOrMore,
110     cl::desc("Use atomic fetch add for first counter in a function (usually "
111              "the entry counter)"),
112     cl::init(false));
113 
// If the option is not specified, whether counter promotion is done depends
// on how the instrumentation lowering pipeline is set up, i.e., this option's
// default value does not by itself mean the promotion will be done.
// Explicitly setting this option overrides the default behavior.
119 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
120                                  cl::desc("Do counter register promotion"),
121                                  cl::init(false));
122 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
123     cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number of counter promotions per loop to avoid"
             " increasing register pressure too much"));
126 
127 // A debug option
128 cl::opt<int>
129     MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
130                        cl::desc("Max number of allowed counter promotions"));
131 
132 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
133     cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             "speculative counter promotion"));
136 
137 cl::opt<bool> SpeculativeCounterPromotionToLoop(
138     cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             "update can be further/iteratively promoted into an acyclic "
             "region."));
143 
144 cl::opt<bool> IterativeCounterPromotion(
145     cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
146     cl::desc("Allow counter promotion across the whole loop nest."));
147 
148 cl::opt<bool> SkipRetExitBlock(
149     cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
150     cl::desc("Suppress counter promotion if exit blocks contain ret."));
151 
152 class InstrProfilingLegacyPass : public ModulePass {
153   InstrProfiling InstrProf;
154 
155 public:
156   static char ID;
157 
158   InstrProfilingLegacyPass() : ModulePass(ID) {}
159   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
160       : ModulePass(ID), InstrProf(Options, IsCS) {
161     initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
162   }
163 
164   StringRef getPassName() const override {
165     return "Frontend instrumentation-based coverage lowering";
166   }
167 
168   bool runOnModule(Module &M) override {
169     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
170       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
171     };
172     return InstrProf.run(M, GetTLI);
173   }
174 
175   void getAnalysisUsage(AnalysisUsage &AU) const override {
176     AU.setPreservesCFG();
177     AU.addRequired<TargetLibraryInfoWrapperPass>();
178   }
179 };
180 
181 ///
/// A helper class to promote one counter RMW operation in the loop
/// into a register update.
///
/// The RMW update for the counter will be sunk out of the loop after
/// the transformation.
187 ///
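/// As a rough sketch (variable names are illustrative), a counter update in
/// the loop body such as
///   %old = load i64, i64* %counter.addr
///   %new = add i64 %old, <step>
///   store i64 %new, i64* %counter.addr
/// is kept in a register inside the loop, and a single merged update
///   %promoted = load i64, i64* %counter.addr
///   %merged = add i64 %promoted, <live-in count>
///   store i64 %merged, i64* %counter.addr
/// (or an atomic fetch-add when -atomic-counter-update-promoted is set)
/// is emitted in each exit block instead.
///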
188 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
189 public:
190   PGOCounterPromoterHelper(
191       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
192       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
193       ArrayRef<Instruction *> InsertPts,
194       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
195       LoopInfo &LI)
196       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
197         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
198     assert(isa<LoadInst>(L));
199     assert(isa<StoreInst>(S));
200     SSA.AddAvailableValue(PH, Init);
201   }
202 
203   void doExtraRewritesBeforeFinalDeletion() override {
204     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
205       BasicBlock *ExitBlock = ExitBlocks[i];
206       Instruction *InsertPos = InsertPts[i];
207       // Get LiveIn value into the ExitBlock. If there are multiple
208       // predecessors, the value is defined by a PHI node in this
209       // block.
210       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
211       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
212       Type *Ty = LiveInValue->getType();
213       IRBuilder<> Builder(InsertPos);
214       if (AtomicCounterUpdatePromoted)
        // Atomic updates can currently only be promoted across the current
        // loop, not the whole loop nest.
217         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
218                                 MaybeAlign(),
219                                 AtomicOrdering::SequentiallyConsistent);
220       else {
221         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
222         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
223         auto *NewStore = Builder.CreateStore(NewVal, Addr);
224 
225         // Now update the parent loop's candidate list:
226         if (IterativeCounterPromotion) {
227           auto *TargetLoop = LI.getLoopFor(ExitBlock);
228           if (TargetLoop)
229             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
230         }
231       }
232     }
233   }
234 
235 private:
236   Instruction *Store;
237   ArrayRef<BasicBlock *> ExitBlocks;
238   ArrayRef<Instruction *> InsertPts;
239   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
240   LoopInfo &LI;
241 };
242 
243 /// A helper class to do register promotion for all profile counter
244 /// updates in a loop.
245 ///
246 class PGOCounterPromoter {
247 public:
248   PGOCounterPromoter(
249       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
250       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
251       : LoopToCandidates(LoopToCands), L(CurLoop), LI(LI), BFI(BFI) {
252 
253     // Skip collection of ExitBlocks and InsertPts for loops that will not be
254     // able to have counters promoted.
255     SmallVector<BasicBlock *, 8> LoopExitBlocks;
256     SmallPtrSet<BasicBlock *, 8> BlockSet;
257 
258     L.getExitBlocks(LoopExitBlocks);
259     if (!isPromotionPossible(&L, LoopExitBlocks))
260       return;
261 
262     for (BasicBlock *ExitBlock : LoopExitBlocks) {
263       if (BlockSet.insert(ExitBlock).second) {
264         ExitBlocks.push_back(ExitBlock);
265         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
266       }
267     }
268   }
269 
270   bool run(int64_t *NumPromoted) {
271     // Skip 'infinite' loops:
272     if (ExitBlocks.size() == 0)
273       return false;
274 
275     // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of an incomplete profile -- if the
    // loop is a long-running loop and dump is called in the middle
    // of the loop, the resulting profile is incomplete.
279     // FIXME: add other heuristics to detect long running loops.
280     if (SkipRetExitBlock) {
281       for (auto BB : ExitBlocks)
282         if (isa<ReturnInst>(BB->getTerminator()))
283           return false;
284     }
285 
286     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
287     if (MaxProm == 0)
288       return false;
289 
290     unsigned Promoted = 0;
291     for (auto &Cand : LoopToCandidates[&L]) {
292 
293       SmallVector<PHINode *, 4> NewPHIs;
294       SSAUpdater SSA(&NewPHIs);
295       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
296 
297       // If BFI is set, we will use it to guide the promotions.
298       if (BFI) {
299         auto *BB = Cand.first->getParent();
300         auto InstrCount = BFI->getBlockProfileCount(BB);
301         if (!InstrCount)
302           continue;
303         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count (InstrCount / PreheaderCount) is not
        // greater than 1.5, we skip promotion, i.e. skip when
        // PreheaderCount * 3 >= InstrCount * 2.
306         if (PreheaderCount &&
307             (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
308           continue;
309       }
310 
311       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
312                                         L.getLoopPreheader(), ExitBlocks,
313                                         InsertPts, LoopToCandidates, LI);
314       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
315       Promoted++;
316       if (Promoted >= MaxProm)
317         break;
318 
319       (*NumPromoted)++;
320       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
321         break;
322     }
323 
324     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
325                       << L.getLoopDepth() << ")\n");
326     return Promoted != 0;
327   }
328 
329 private:
330   bool allowSpeculativeCounterPromotion(Loop *LP) {
331     SmallVector<BasicBlock *, 8> ExitingBlocks;
332     L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
334     if (ExitingBlocks.size() == 1)
335       return true;
336     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
337       return false;
338     return true;
339   }
340 
341   // Check whether the loop satisfies the basic conditions needed to perform
342   // Counter Promotions.
343   bool
344   isPromotionPossible(Loop *LP,
345                       const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
346     // We can't insert into a catchswitch.
347     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
348           return isa<CatchSwitchInst>(Exit->getTerminator());
349         }))
350       return false;
351 
352     if (!LP->hasDedicatedExits())
353       return false;
354 
355     BasicBlock *PH = LP->getLoopPreheader();
356     if (!PH)
357       return false;
358 
359     return true;
360   }
361 
362   // Returns the max number of Counter Promotions for LP.
363   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
364     SmallVector<BasicBlock *, 8> LoopExitBlocks;
365     LP->getExitBlocks(LoopExitBlocks);
366     if (!isPromotionPossible(LP, LoopExitBlocks))
367       return 0;
368 
369     SmallVector<BasicBlock *, 8> ExitingBlocks;
370     LP->getExitingBlocks(ExitingBlocks);
371 
372     // If BFI is set, we do more aggressive promotions based on BFI.
373     if (BFI)
374       return (unsigned)-1;
375 
    // Not considered speculative.
377     if (ExitingBlocks.size() == 1)
378       return MaxNumOfPromotionsPerLoop;
379 
380     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
381       return 0;
382 
383     // Whether the target block is in a loop does not matter:
384     if (SpeculativeCounterPromotionToLoop)
385       return MaxNumOfPromotionsPerLoop;
386 
387     // Now check the target block:
388     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
389     for (auto *TargetBlock : LoopExitBlocks) {
390       auto *TargetLoop = LI.getLoopFor(TargetBlock);
391       if (!TargetLoop)
392         continue;
393       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
394       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
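      // The target loop can accept at most
      //   max(MaxPromForTarget, PendingCandsInTarget) - PendingCandsInTarget
      // more promotions, i.e. none once its pending candidates already reach
      // its own limit; cap MaxProm accordingly.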
395       MaxProm =
396           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
397                                 PendingCandsInTarget);
398     }
399     return MaxProm;
400   }
401 
402   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
403   SmallVector<BasicBlock *, 8> ExitBlocks;
404   SmallVector<Instruction *, 8> InsertPts;
405   Loop &L;
406   LoopInfo &LI;
407   BlockFrequencyInfo *BFI;
408 };
409 
410 enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
  // profiling.
413   Default,
414 
415   // MemOp: the memop size value profiling.
416   MemOp
417 };
418 
419 } // end anonymous namespace
420 
421 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
422   FunctionAnalysisManager &FAM =
423       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
424   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
425     return FAM.getResult<TargetLibraryAnalysis>(F);
426   };
427   if (!run(M, GetTLI))
428     return PreservedAnalyses::all();
429 
430   return PreservedAnalyses::none();
431 }
432 
433 char InstrProfilingLegacyPass::ID = 0;
434 INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof",
435                       "Frontend instrumentation-based coverage lowering.",
436                       false, false)
437 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
438 INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof",
439                     "Frontend instrumentation-based coverage lowering.", false,
440                     false)
441 
442 ModulePass *
443 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
444                                      bool IsCS) {
445   return new InstrProfilingLegacyPass(Options, IsCS);
446 }
447 
448 static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
449   InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
450   if (Inc)
451     return Inc;
452   return dyn_cast<InstrProfIncrementInst>(Instr);
453 }
454 
455 bool InstrProfiling::lowerIntrinsics(Function *F) {
456   bool MadeChange = false;
457   PromotionCandidates.clear();
458   for (BasicBlock &BB : *F) {
459     for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
460       InstrProfIncrementInst *Inc = castToIncrementInst(&Instr);
461       if (Inc) {
462         lowerIncrement(Inc);
463         MadeChange = true;
464       } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
465         lowerValueProfileInst(Ind);
466         MadeChange = true;
467       }
468     }
469   }
470 
471   if (!MadeChange)
472     return false;
473 
474   promoteCounterLoadStores(F);
475   return true;
476 }
477 
478 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // Mach-O doesn't support weak external references.
480   if (TT.isOSBinFormatMachO())
481     return false;
482 
483   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
484     return RuntimeCounterRelocation;
485 
486   // Fuchsia uses runtime counter relocation by default.
487   return TT.isOSFuchsia();
488 }
489 
490 bool InstrProfiling::isCounterPromotionEnabled() const {
491   if (DoCounterPromotion.getNumOccurrences() > 0)
492     return DoCounterPromotion;
493 
494   return Options.DoCounterPromotion;
495 }
496 
497 void InstrProfiling::promoteCounterLoadStores(Function *F) {
498   if (!isCounterPromotionEnabled())
499     return;
500 
501   DominatorTree DT(*F);
502   LoopInfo LI(DT);
503   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
504 
505   std::unique_ptr<BlockFrequencyInfo> BFI;
506   if (Options.UseBFIInPromotion) {
507     std::unique_ptr<BranchProbabilityInfo> BPI;
508     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
509     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
510   }
511 
512   for (const auto &LoadStore : PromotionCandidates) {
513     auto *CounterLoad = LoadStore.first;
514     auto *CounterStore = LoadStore.second;
515     BasicBlock *BB = CounterLoad->getParent();
516     Loop *ParentLoop = LI.getLoopFor(BB);
517     if (!ParentLoop)
518       continue;
519     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
520   }
521 
522   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
523 
524   // Do a post-order traversal of the loops so that counter updates can be
525   // iteratively hoisted outside the loop nest.
526   for (auto *Loop : llvm::reverse(Loops)) {
527     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
528     Promoter.run(&TotalCountersPromoted);
529   }
530 }
531 
532 static bool needsRuntimeHookUnconditionally(const Triple &TT) {
  // On Fuchsia, we only need the runtime hook if any counters are present.
534   if (TT.isOSFuchsia())
535     return false;
536 
537   return true;
538 }
539 
540 /// Check if the module contains uses of any profiling intrinsics.
541 static bool containsProfilingIntrinsics(Module &M) {
542   if (auto *F = M.getFunction(
543           Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
544     if (!F->use_empty())
545       return true;
546   if (auto *F = M.getFunction(
547           Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
548     if (!F->use_empty())
549       return true;
550   if (auto *F = M.getFunction(
551           Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
552     if (!F->use_empty())
553       return true;
554   return false;
555 }
556 
557 bool InstrProfiling::run(
558     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
559   this->M = &M;
560   this->GetTLI = std::move(GetTLI);
561   NamesVar = nullptr;
562   NamesSize = 0;
563   ProfileDataMap.clear();
564   CompilerUsedVars.clear();
565   UsedVars.clear();
566   TT = Triple(M.getTargetTriple());
567 
568   bool MadeChange = false;
569 
570   // Emit the runtime hook even if no counters are present.
571   if (needsRuntimeHookUnconditionally(TT))
572     MadeChange = emitRuntimeHook();
573 
574   // Improve compile time by avoiding linear scans when there is no work.
575   GlobalVariable *CoverageNamesVar =
576       M.getNamedGlobal(getCoverageUnusedNamesVarName());
577   if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
578     return MadeChange;
579 
  // The frontend did not know how many value sites there would be inside an
  // instrumented function, so count the number of instrumented target value
  // sites here and record the count as a field in the profile data variable.
583   for (Function &F : M) {
584     InstrProfIncrementInst *FirstProfIncInst = nullptr;
585     for (BasicBlock &BB : F)
586       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
587         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
588           computeNumValueSiteCounts(Ind);
589         else if (FirstProfIncInst == nullptr)
590           FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);
591 
    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
594     if (FirstProfIncInst != nullptr)
595       static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
596   }
597 
598   for (Function &F : M)
599     MadeChange |= lowerIntrinsics(&F);
600 
601   if (CoverageNamesVar) {
602     lowerCoverageData(CoverageNamesVar);
603     MadeChange = true;
604   }
605 
606   if (!MadeChange)
607     return false;
608 
609   emitVNodes();
610   emitNameData();
611   emitRuntimeHook();
612   emitRegistration();
613   emitUses();
614   emitInitialization();
615   return true;
616 }
617 
618 static FunctionCallee getOrInsertValueProfilingCall(
619     Module &M, const TargetLibraryInfo &TLI,
620     ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
621   LLVMContext &Ctx = M.getContext();
622   auto *ReturnTy = Type::getVoidTy(M.getContext());
623 
624   AttributeList AL;
625   if (auto AK = TLI.getExtAttrForI32Param(false))
626     AL = AL.addParamAttribute(M.getContext(), 2, AK);
627 
628   assert((CallType == ValueProfilingCallType::Default ||
629           CallType == ValueProfilingCallType::MemOp) &&
630          "Must be Default or MemOp");
631   Type *ParamTypes[] = {
632 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
633 #include "llvm/ProfileData/InstrProfData.inc"
634   };
635   auto *ValueProfilingCallTy =
636       FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
637   StringRef FuncName = CallType == ValueProfilingCallType::Default
638                            ? getInstrProfValueProfFuncName()
639                            : getInstrProfValueProfMemOpFuncName();
640   return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
641 }
642 
643 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
644   GlobalVariable *Name = Ind->getName();
645   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
646   uint64_t Index = Ind->getIndex()->getZExtValue();
647   auto &PD = ProfileDataMap[Name];
648   PD.NumValueSites[ValueKind] =
649       std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
650 }
651 
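// Lower a call to llvm.instrprof.value.profile into a call to the value
// profiling runtime hook (getInstrProfValueProfFuncName(), or the memop
// variant for IPVK_MemOPSize sites), passing the profiled value, the
// per-function data variable, and the flattened value-site index.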
652 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
653   // TODO: Value profiling heavily depends on the data section which is omitted
654   // in lightweight mode. We need to move the value profile pointer to the
655   // Counter struct to get this working.
656   assert(
657       !DebugInfoCorrelate &&
658       "Value profiling is not yet supported with lightweight instrumentation");
659   GlobalVariable *Name = Ind->getName();
660   auto It = ProfileDataMap.find(Name);
661   assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");
663 
664   GlobalVariable *DataVar = It->second.DataVar;
665   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
666   uint64_t Index = Ind->getIndex()->getZExtValue();
667   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
668     Index += It->second.NumValueSites[Kind];
669 
670   IRBuilder<> Builder(Ind);
671   bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
672                       llvm::InstrProfValueKind::IPVK_MemOPSize);
673   CallInst *Call = nullptr;
674   auto *TLI = &GetTLI(*Ind->getFunction());
675 
676   // To support value profiling calls within Windows exception handlers, funclet
677   // information contained within operand bundles needs to be copied over to
678   // the library call. This is required for the IR to be processed by the
679   // WinEHPrepare pass.
680   SmallVector<OperandBundleDef, 1> OpBundles;
681   Ind->getOperandBundlesAsDefs(OpBundles);
682   if (!IsMemOpSize) {
683     Value *Args[3] = {Ind->getTargetValue(),
684                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
685                       Builder.getInt32(Index)};
686     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
687                               OpBundles);
688   } else {
689     Value *Args[3] = {Ind->getTargetValue(),
690                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
691                       Builder.getInt32(Index)};
692     Call = Builder.CreateCall(
693         getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
694         Args, OpBundles);
695   }
696   if (auto AK = TLI->getExtAttrForI32Param(false))
697     Call->addParamAttr(2, AK);
698   Ind->replaceAllUsesWith(Call);
699   Ind->eraseFromParent();
700 }
701 
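// Lower a call to llvm.instrprof.increment (or .increment.step) into a direct
// update of the corresponding slot of the function's counter array. As a
// rough sketch (names are illustrative), the default non-atomic case becomes:
//   %addr = getelementptr [N x i64], [N x i64]* @__profc_<func>, i32 0, i32 I
//   %pgocount = load i64, i64* %addr
//   %new = add i64 %pgocount, <step>
//   store i64 %new, i64* %addr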
702 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
703   GlobalVariable *Counters = getOrCreateRegionCounters(Inc);
704 
705   IRBuilder<> Builder(Inc);
706   uint64_t Index = Inc->getIndex()->getZExtValue();
707   Value *Addr = Builder.CreateConstInBoundsGEP2_32(Counters->getValueType(),
708                                                    Counters, 0, Index);
709 
710   if (isRuntimeCounterRelocationEnabled()) {
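    // With runtime counter relocation, bias the counter address by the value
    // of the counter-bias variable (getInstrProfCounterBiasVarName()), loaded
    // once in the entry block; the effective address is roughly
    // (i64)&counter + bias, materialized below via ptrtoint/add/inttoptr.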
711     Type *Int64Ty = Type::getInt64Ty(M->getContext());
712     Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
713     Function *Fn = Inc->getParent()->getParent();
714     Instruction &I = Fn->getEntryBlock().front();
715     LoadInst *LI = dyn_cast<LoadInst>(&I);
716     if (!LI) {
717       IRBuilder<> Builder(&I);
718       GlobalVariable *Bias =
719           M->getGlobalVariable(getInstrProfCounterBiasVarName());
720       if (!Bias) {
        // The compiler must define this variable when runtime counter
        // relocation is used. The runtime has a weak external reference to it
        // that is used to check whether relocation is enabled.
724         Bias = new GlobalVariable(
725             *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
726             Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
727         Bias->setVisibility(GlobalVariable::HiddenVisibility);
728         // A definition that's weak (linkonce_odr) without being in a COMDAT
729         // section wouldn't lead to link errors, but it would lead to a dead
730         // data word from every TU but one. Putting it in COMDAT ensures there
731         // will be exactly one data slot in the link.
732         if (TT.supportsCOMDAT())
733           Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
734       }
735       LI = Builder.CreateLoad(Int64Ty, Bias);
736     }
737     auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
738     Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
739   }
740 
741   if (Options.Atomic || AtomicCounterUpdateAll ||
742       (Index == 0 && AtomicFirstCounter)) {
743     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
744                             MaybeAlign(), AtomicOrdering::Monotonic);
745   } else {
746     Value *IncStep = Inc->getStep();
747     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
748     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
749     auto *Store = Builder.CreateStore(Count, Addr);
750     if (isCounterPromotionEnabled())
751       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
752   }
753   Inc->eraseFromParent();
754 }
755 
756 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
757   ConstantArray *Names =
758       cast<ConstantArray>(CoverageNamesVar->getInitializer());
759   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
760     Constant *NC = Names->getOperand(I);
761     Value *V = NC->stripPointerCasts();
762     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
763     GlobalVariable *Name = cast<GlobalVariable>(V);
764 
765     Name->setLinkage(GlobalValue::PrivateLinkage);
766     ReferencedNames.push_back(Name);
767     NC->dropAllReferences();
768   }
769   CoverageNamesVar->eraseFromParent();
770 }
771 
772 /// Get the name of a profiling variable for a particular function.
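/// If hash-based counter splitting applies (a renamable comdat function under
/// IR PGO), the function's CFG hash is appended, so the result is roughly
/// "<Prefix><Name>.<Hash>"; this keeps variables of copies with differing
/// CFGs distinct after linking.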
773 static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix,
774                               bool &Renamed) {
775   StringRef NamePrefix = getInstrProfNameVarPrefix();
776   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
777   Function *F = Inc->getParent()->getParent();
778   Module *M = F->getParent();
779   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
780       !canRenameComdatFunc(*F)) {
781     Renamed = false;
782     return (Prefix + Name).str();
783   }
784   Renamed = true;
785   uint64_t FuncHash = Inc->getHash()->getZExtValue();
786   SmallVector<char, 24> HashPostfix;
787   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
788     return (Prefix + Name).str();
789   return (Prefix + Name + "." + Twine(FuncHash)).str();
790 }
791 
792 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
793   auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
794   if (!MD)
795     return 0;
796 
797   // If the flag is a ConstantAsMetadata, it should be an integer representable
798   // in 64-bits.
799   return cast<ConstantInt>(MD->getValue())->getZExtValue();
800 }
801 
802 static bool enablesValueProfiling(const Module &M) {
803   return isIRPGOFlagSet(&M) ||
804          getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
805 }
806 
807 // Conservatively returns true if data variables may be referenced by code.
808 static bool profDataReferencedByCode(const Module &M) {
809   return enablesValueProfiling(M);
810 }
811 
812 static inline bool shouldRecordFunctionAddr(Function *F) {
813   // Only record function addresses if IR PGO is enabled or if clang value
814   // profiling is enabled. Recording function addresses greatly increases object
815   // file size, because it prevents the inliner from deleting functions that
816   // have been inlined everywhere.
817   if (!profDataReferencedByCode(*F->getParent()))
818     return false;
819 
820   // Check the linkage
821   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
822   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
823       !HasAvailableExternallyLinkage)
824     return true;
825 
826   // A function marked 'alwaysinline' with available_externally linkage can't
827   // have its address taken. Doing so would create an undefined external ref to
828   // the function, which would fail to link.
829   if (HasAvailableExternallyLinkage &&
830       F->hasFnAttribute(Attribute::AlwaysInline))
831     return false;
832 
833   // Prohibit function address recording if the function is both internal and
834   // COMDAT. This avoids the profile data variable referencing internal symbols
835   // in COMDAT.
836   if (F->hasLocalLinkage() && F->hasComdat())
837     return false;
838 
  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkonce_odr linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with the missing address may be picked by the linker, leading to missing
  // indirect call target info.
846   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
847 }
848 
849 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin; compiler-rt uses linker magic.
851   if (TT.isOSDarwin())
852     return false;
853   // Use linker script magic to get data/cnts/name start/end.
854   if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
855       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() || TT.isOSWindows())
856     return false;
857 
858   return true;
859 }
860 
861 GlobalVariable *
862 InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
863   GlobalVariable *NamePtr = Inc->getName();
864   auto &PD = ProfileDataMap[NamePtr];
865   if (PD.RegionCounters)
866     return PD.RegionCounters;
867 
868   // Match the linkage and visibility of the name global.
869   Function *Fn = Inc->getParent()->getParent();
870   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
871   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
872 
873   // Use internal rather than private linkage so the counter variable shows up
874   // in the symbol table when using debug info for correlation.
875   if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
876       Linkage == GlobalValue::PrivateLinkage)
877     Linkage = GlobalValue::InternalLinkage;
878 
  // Due to a limitation of the binder as of 2021/09/28, duplicate weak symbols
  // in the same csect won't be discarded. When there are duplicate weak
  // symbols, we cannot guarantee that the relocations get resolved to the
  // intended weak symbol, and thus cannot ensure the correctness of the
  // relative CounterPtr, so we have to use private linkage for counter and
  // data symbols.
884   if (TT.isOSBinFormatXCOFF()) {
885     Linkage = GlobalValue::PrivateLinkage;
886     Visibility = GlobalValue::DefaultVisibility;
887   }
888   // Move the name variable to the right section. Place them in a COMDAT group
889   // if the associated function is a COMDAT. This will make sure that only one
890   // copy of counters of the COMDAT function will be emitted after linking. Keep
891   // in mind that this pass may run before the inliner, so we need to create a
892   // new comdat group for the counters and profiling data. If we use the comdat
893   // of the parent function, that will result in relocations against discarded
894   // sections.
895   //
  // If the data variable is referenced by code, counters and data have to be
897   // in different comdats for COFF because the Visual C++ linker will report
898   // duplicate symbol errors if there are multiple external symbols with the
899   // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
900   //
901   // For ELF, when not using COMDAT, put counters, data and values into a
902   // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
903   // allows -z start-stop-gc to discard the entire group when the function is
904   // discarded.
905   bool DataReferencedByCode = profDataReferencedByCode(*M);
906   bool NeedComdat = needsComdatForCounter(*Fn, *M);
907   bool Renamed;
908   std::string CntsVarName =
909       getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
910   std::string DataVarName =
911       getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
912   auto MaybeSetComdat = [&](GlobalVariable *GV) {
913     bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
914     if (UseComdat) {
915       StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
916                                 ? GV->getName()
917                                 : CntsVarName;
918       Comdat *C = M->getOrInsertComdat(GroupName);
919       if (!NeedComdat)
920         C->setSelectionKind(Comdat::NoDeduplicate);
921       GV->setComdat(C);
922     }
923   };
924 
925   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
926   LLVMContext &Ctx = M->getContext();
927   ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
928 
929   // Create the counters variable.
930   auto *CounterPtr =
931       new GlobalVariable(*M, CounterTy, false, Linkage,
932                          Constant::getNullValue(CounterTy), CntsVarName);
933   CounterPtr->setVisibility(Visibility);
934   CounterPtr->setSection(
935       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
936   CounterPtr->setAlignment(Align(8));
937   MaybeSetComdat(CounterPtr);
938   CounterPtr->setLinkage(Linkage);
939   PD.RegionCounters = CounterPtr;
940   if (DebugInfoCorrelate) {
941     if (auto *SP = Fn->getSubprogram()) {
942       DIBuilder DB(*M, true, SP->getUnit());
943       Metadata *FunctionNameAnnotation[] = {
944           MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
945           MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
946       };
947       Metadata *CFGHashAnnotation[] = {
948           MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
949           ConstantAsMetadata::get(Inc->getHash()),
950       };
951       Metadata *NumCountersAnnotation[] = {
952           MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
953           ConstantAsMetadata::get(Inc->getNumCounters()),
954       };
955       auto Annotations = DB.getOrCreateArray({
956           MDNode::get(Ctx, FunctionNameAnnotation),
957           MDNode::get(Ctx, CFGHashAnnotation),
958           MDNode::get(Ctx, NumCountersAnnotation),
959       });
960       auto *DICounter = DB.createGlobalVariableExpression(
961           SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
962           /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
963           CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
964           /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
965           Annotations);
966       CounterPtr->addDebugInfo(DICounter);
967       DB.finalize();
968     } else {
969       std::string Msg = ("Missing debug info for function " + Fn->getName() +
970                          "; required for profile correlation.")
971                             .str();
972       Ctx.diagnose(
973           DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
974     }
975   }
976 
977   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
978   // Allocate statically the array of pointers to value profile nodes for
979   // the current function.
980   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
981   uint64_t NS = 0;
982   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
983     NS += PD.NumValueSites[Kind];
984   if (NS > 0 && ValueProfileStaticAlloc &&
985       !needsRuntimeRegistrationOfSectionRange(TT)) {
986     ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
987     auto *ValuesVar = new GlobalVariable(
988         *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
989         getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
990     ValuesVar->setVisibility(Visibility);
991     ValuesVar->setSection(
992         getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
993     ValuesVar->setAlignment(Align(8));
994     MaybeSetComdat(ValuesVar);
995     ValuesPtrExpr =
996         ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
997   }
998 
999   if (DebugInfoCorrelate) {
1000     // Mark the counter variable as used so that it isn't optimized out.
1001     CompilerUsedVars.push_back(PD.RegionCounters);
1002     return PD.RegionCounters;
1003   }
1004 
1005   // Create data variable.
1006   auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
1007   auto *Int16Ty = Type::getInt16Ty(Ctx);
1008   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
1009   Type *DataTypes[] = {
1010 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
1011 #include "llvm/ProfileData/InstrProfData.inc"
1012   };
1013   auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));
1014 
1015   Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
1016                                ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
1017                                : ConstantPointerNull::get(Int8PtrTy);
1018 
1019   Constant *Int16ArrayVals[IPVK_Last + 1];
1020   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1021     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
1022 
1023   // If the data variable is not referenced by code (if we don't emit
1024   // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
1025   // data variable live under linker GC, the data variable can be private. This
1026   // optimization applies to ELF.
1027   //
1028   // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
1029   // to be false.
1030   //
1031   // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
1032   // that other copies must have the same CFG and cannot have value profiling.
1033   // If no hash suffix, other profd copies may be referenced by code.
1034   if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
1035       (TT.isOSBinFormatELF() ||
1036        (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
1037     Linkage = GlobalValue::PrivateLinkage;
1038     Visibility = GlobalValue::DefaultVisibility;
1039   }
1040   auto *Data =
1041       new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
1042   // Reference the counter variable with a label difference (link-time
1043   // constant).
1044   auto *RelativeCounterPtr =
1045       ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
1046                            ConstantExpr::getPtrToInt(Data, IntPtrTy));
1047 
1048   Constant *DataVals[] = {
1049 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
1050 #include "llvm/ProfileData/InstrProfData.inc"
1051   };
1052   Data->setInitializer(ConstantStruct::get(DataTy, DataVals));
1053 
1054   Data->setVisibility(Visibility);
1055   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
1056   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
1057   MaybeSetComdat(Data);
1058   Data->setLinkage(Linkage);
1059 
1060   PD.DataVar = Data;
1061 
1062   // Mark the data variable as used so that it isn't stripped out.
1063   CompilerUsedVars.push_back(Data);
1064   // Now that the linkage set by the FE has been passed to the data and counter
1065   // variables, reset Name variable's linkage and visibility to private so that
1066   // it can be removed later by the compiler.
1067   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
1068   // Collect the referenced names to be used by emitNameData.
1069   ReferencedNames.push_back(NamePtr);
1070 
1071   return PD.RegionCounters;
1072 }
1073 
1074 void InstrProfiling::emitVNodes() {
1075   if (!ValueProfileStaticAlloc)
1076     return;
1077 
1078   // For now only support this on platforms that do
1079   // not require runtime registration to discover
1080   // named section start/end.
1081   if (needsRuntimeRegistrationOfSectionRange(TT))
1082     return;
1083 
1084   size_t TotalNS = 0;
1085   for (auto &PD : ProfileDataMap) {
1086     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1087       TotalNS += PD.second.NumValueSites[Kind];
1088   }
1089 
1090   if (!TotalNS)
1091     return;
1092 
1093   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
1094 // Heuristic for small programs with very few total value sites.
1095 // The default value of vp-counters-per-site is chosen based on
1096 // the observation that large apps usually have a low percentage
1097 // of value sites that actually have any profile data, and thus
1098 // the average number of counters per site is low. For small
1099 // apps with very few sites, this may not be true. Bump up the
1100 // number of counters in this case.
1101 #define INSTR_PROF_MIN_VAL_COUNTS 10
1102   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
1103     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
1104 
1105   auto &Ctx = M->getContext();
1106   Type *VNodeTypes[] = {
1107 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
1108 #include "llvm/ProfileData/InstrProfData.inc"
1109   };
1110   auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));
1111 
1112   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
1113   auto *VNodesVar = new GlobalVariable(
1114       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
1115       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
1116   VNodesVar->setSection(
1117       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
1118   // VNodesVar is used by runtime but not referenced via relocation by other
1119   // sections. Conservatively make it linker retained.
1120   UsedVars.push_back(VNodesVar);
1121 }
1122 
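// Concatenate all referenced PGO function names into a single (optionally
// compressed) blob and emit it as one private global in the names section;
// the individual name globals are then deleted.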
1123 void InstrProfiling::emitNameData() {
1124   std::string UncompressedData;
1125 
1126   if (ReferencedNames.empty())
1127     return;
1128 
1129   std::string CompressedNameStr;
1130   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
1131                                           DoInstrProfNameCompression)) {
1132     report_fatal_error(Twine(toString(std::move(E))), false);
1133   }
1134 
1135   auto &Ctx = M->getContext();
1136   auto *NamesVal =
1137       ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
1138   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
1139                                 GlobalValue::PrivateLinkage, NamesVal,
1140                                 getInstrProfNamesVarName());
1141   NamesSize = CompressedNameStr.size();
1142   NamesVar->setSection(
1143       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
1144   // On COFF, it's important to reduce the alignment down to 1 to prevent the
1145   // linker from inserting padding before the start of the names section or
1146   // between names entries.
1147   NamesVar->setAlignment(Align(1));
1148   // NamesVar is used by runtime but not referenced via relocation by other
1149   // sections. Conservatively make it linker retained.
1150   UsedVars.push_back(NamesVar);
1151 
1152   for (auto *NamePtr : ReferencedNames)
1153     NamePtr->eraseFromParent();
1154 }
1155 
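// For targets where the runtime cannot locate the profile sections via linker
// or platform support, emit a registration function that calls the runtime's
// registration hooks for each emitted profiling variable and for the names
// blob.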
1156 void InstrProfiling::emitRegistration() {
1157   if (!needsRuntimeRegistrationOfSectionRange(TT))
1158     return;
1159 
1160   // Construct the function.
1161   auto *VoidTy = Type::getVoidTy(M->getContext());
1162   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1163   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1164   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1165   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1166                                      getInstrProfRegFuncsName(), M);
1167   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1168   if (Options.NoRedZone)
1169     RegisterF->addFnAttr(Attribute::NoRedZone);
1170 
1171   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1172   auto *RuntimeRegisterF =
1173       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1174                        getInstrProfRegFuncName(), M);
1175 
1176   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1177   for (Value *Data : CompilerUsedVars)
1178     if (!isa<Function>(Data))
1179       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1180   for (Value *Data : UsedVars)
1181     if (Data != NamesVar && !isa<Function>(Data))
1182       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1183 
1184   if (NamesVar) {
1185     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1186     auto *NamesRegisterTy =
1187         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
1188     auto *NamesRegisterF =
1189         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1190                          getInstrProfNamesRegFuncName(), M);
1191     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1192                                     IRB.getInt64(NamesSize)});
1193   }
1194 
1195   IRB.CreateRetVoid();
1196 }
1197 
1198 bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
  // in which case there is no need to emit the external variable.
1201   if (TT.isOSLinux())
1202     return false;
1203 
1204   // If the module's provided its own runtime, we don't need to do anything.
1205   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1206     return false;
1207 
1208   // Declare an external variable that will pull in the runtime initialization.
1209   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1210   auto *Var =
1211       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1212                          nullptr, getInstrProfRuntimeHookVarName());
1213 
1214   if (TT.isOSBinFormatELF()) {
1215     // Mark the user variable as used so that it isn't stripped out.
1216     CompilerUsedVars.push_back(Var);
1217   } else {
1218     // Make a function that uses it.
1219     auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1220                                   GlobalValue::LinkOnceODRLinkage,
1221                                   getInstrProfRuntimeHookVarUseFuncName(), M);
1222     User->addFnAttr(Attribute::NoInline);
1223     if (Options.NoRedZone)
1224       User->addFnAttr(Attribute::NoRedZone);
1225     User->setVisibility(GlobalValue::HiddenVisibility);
1226     if (TT.supportsCOMDAT())
1227       User->setComdat(M->getOrInsertComdat(User->getName()));
1228 
1229     IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1230     auto *Load = IRB.CreateLoad(Int32Ty, Var);
1231     IRB.CreateRet(Load);
1232 
1233     // Mark the function as used so that it isn't stripped out.
1234     CompilerUsedVars.push_back(User);
1235   }
1236   return true;
1237 }
1238 
1239 void InstrProfiling::emitUses() {
1240   // The metadata sections are parallel arrays. Optimizers (e.g.
1241   // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
1242   // we conservatively retain all unconditionally in the compiler.
1243   //
1244   // On ELF and Mach-O, the linker can guarantee the associated sections will be
1245   // retained or discarded as a unit, so llvm.compiler.used is sufficient.
1246   // Similarly on COFF, if prof data is not referenced by code we use one comdat
1247   // and ensure this GC property as well. Otherwise, we have to conservatively
1248   // make all of the sections retained by the linker.
1249   if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
1250       (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
1251     appendToCompilerUsed(*M, CompilerUsedVars);
1252   else
1253     appendToUsed(*M, CompilerUsedVars);
1254 
  // We do not add proper references from used metadata sections to NamesVar
  // and VNodesVar, so we have to be conservative and place them in llvm.used
  // regardless of the target.
1258   appendToUsed(*M, UsedVars);
1259 }
1260 
1261 void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: that lowering happens after
  // LTO/ThinLTO linking, and pass PGOInstrumentationGenCreateVar should
  // have already created the variable before LTO/ThinLTO linking.
1266   if (!IsCS)
1267     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1268   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1269   if (!RegisterF)
1270     return;
1271 
1272   // Create the initialization function.
1273   auto *VoidTy = Type::getVoidTy(M->getContext());
1274   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1275                              GlobalValue::InternalLinkage,
1276                              getInstrProfInitFuncName(), M);
1277   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1278   F->addFnAttr(Attribute::NoInline);
1279   if (Options.NoRedZone)
1280     F->addFnAttr(Attribute::NoRedZone);
1281 
1282   // Add the basic block and the necessary calls.
1283   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1284   IRB.CreateCall(RegisterF, {});
1285   IRB.CreateRetVoid();
1286 
1287   appendToGlobalCtors(*M, F, 0);
1288 }
1289