1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DIBuilder.h"
30 #include "llvm/IR/DerivedTypes.h"
31 #include "llvm/IR/DiagnosticInfo.h"
32 #include "llvm/IR/Dominators.h"
33 #include "llvm/IR/Function.h"
34 #include "llvm/IR/GlobalValue.h"
35 #include "llvm/IR/GlobalVariable.h"
36 #include "llvm/IR/IRBuilder.h"
37 #include "llvm/IR/Instruction.h"
38 #include "llvm/IR/Instructions.h"
39 #include "llvm/IR/IntrinsicInst.h"
40 #include "llvm/IR/Module.h"
41 #include "llvm/IR/Type.h"
42 #include "llvm/InitializePasses.h"
43 #include "llvm/Pass.h"
44 #include "llvm/ProfileData/InstrProf.h"
45 #include "llvm/ProfileData/InstrProfCorrelator.h"
46 #include "llvm/Support/Casting.h"
47 #include "llvm/Support/CommandLine.h"
48 #include "llvm/Support/Error.h"
49 #include "llvm/Support/ErrorHandling.h"
50 #include "llvm/Transforms/Utils/ModuleUtils.h"
51 #include "llvm/Transforms/Utils/SSAUpdater.h"
52 #include <algorithm>
53 #include <cassert>
54 #include <cstdint>
55 #include <string>
56 
57 using namespace llvm;
58 
59 #define DEBUG_TYPE "instrprof"
60 
61 namespace llvm {
62 cl::opt<bool>
63     DebugInfoCorrelate("debug-info-correlate", cl::ZeroOrMore,
64                        cl::desc("Use debug info to correlate profiles."),
65                        cl::init(false));
66 } // namespace llvm
67 
68 namespace {
69 
70 cl::opt<bool> DoHashBasedCounterSplit(
71     "hash-based-counter-split",
72     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
73     cl::init(true));
74 
75 cl::opt<bool>
76     RuntimeCounterRelocation("runtime-counter-relocation",
77                              cl::desc("Enable relocating counters at runtime."),
78                              cl::init(false));
79 
80 cl::opt<bool> ValueProfileStaticAlloc(
81     "vp-static-alloc",
82     cl::desc("Do static counter allocation for value profiler"),
83     cl::init(true));
84 
85 cl::opt<double> NumCountersPerValueSite(
86     "vp-counters-per-site",
87     cl::desc("The average number of profile counters allocated "
88              "per value profiling site."),
    // This is set to a very small value because in real programs, only a very
    // small percentage of value sites have non-zero targets, e.g., 1/30. For
    // those sites with a non-zero profile, the average number of targets is
    // usually smaller than 2.
93     cl::init(1.0));
94 
95 cl::opt<bool> AtomicCounterUpdateAll(
96     "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
97     cl::desc("Make all profile counter updates atomic (for testing only)"),
98     cl::init(false));
99 
100 cl::opt<bool> AtomicCounterUpdatePromoted(
101     "atomic-counter-update-promoted", cl::ZeroOrMore,
102     cl::desc("Do counter update using atomic fetch add "
103              " for promoted counters only"),
104     cl::init(false));
105 
106 cl::opt<bool> AtomicFirstCounter(
107     "atomic-first-counter", cl::ZeroOrMore,
108     cl::desc("Use atomic fetch add for first counter in a function (usually "
109              "the entry counter)"),
110     cl::init(false));
111 
// If the option is not specified, whether counter promotion is done depends
// on how the instrumentation lowering pipeline is set up, i.e., the default
// value of this option alone does not determine whether promotion happens.
// Explicitly setting this option overrides the pipeline's default behavior.
117 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
118                                  cl::desc("Do counter register promotion"),
119                                  cl::init(false));
120 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
121     cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
122     cl::desc("Max number counter promotions per loop to avoid"
123              " increasing register pressure too much"));
124 
125 // A debug option
126 cl::opt<int>
127     MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
128                        cl::desc("Max number of allowed counter promotions"));
129 
130 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
131     cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
132     cl::desc("The max number of exiting blocks of a loop to allow "
133              " speculative counter promotion"));
134 
135 cl::opt<bool> SpeculativeCounterPromotionToLoop(
136     cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
137     cl::desc("When the option is false, if the target block is in a loop, "
138              "the promotion will be disallowed unless the promoted counter "
139              " update can be further/iteratively promoted into an acyclic "
140              " region."));
141 
142 cl::opt<bool> IterativeCounterPromotion(
143     cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
144     cl::desc("Allow counter promotion across the whole loop nest."));
145 
146 cl::opt<bool> SkipRetExitBlock(
147     cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
148     cl::desc("Suppress counter promotion if exit blocks contain ret."));
149 
150 class InstrProfilingLegacyPass : public ModulePass {
151   InstrProfiling InstrProf;
152 
153 public:
154   static char ID;
155 
156   InstrProfilingLegacyPass() : ModulePass(ID) {}
157   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
158       : ModulePass(ID), InstrProf(Options, IsCS) {
159     initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
160   }
161 
162   StringRef getPassName() const override {
163     return "Frontend instrumentation-based coverage lowering";
164   }
165 
166   bool runOnModule(Module &M) override {
167     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
168       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
169     };
170     return InstrProf.run(M, GetTLI);
171   }
172 
173   void getAnalysisUsage(AnalysisUsage &AU) const override {
174     AU.setPreservesCFG();
175     AU.addRequired<TargetLibraryInfoWrapperPass>();
176   }
177 };
178 
179 ///
180 /// A helper class to promote one counter RMW operation in the loop
181 /// into register update.
182 ///
183 /// RWM update for the counter will be sinked out of the loop after
184 /// the transformation.
185 ///
186 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
187 public:
188   PGOCounterPromoterHelper(
189       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
190       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
191       ArrayRef<Instruction *> InsertPts,
192       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
193       LoopInfo &LI)
194       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
195         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
196     assert(isa<LoadInst>(L));
197     assert(isa<StoreInst>(S));
198     SSA.AddAvailableValue(PH, Init);
199   }
200 
201   void doExtraRewritesBeforeFinalDeletion() override {
202     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
203       BasicBlock *ExitBlock = ExitBlocks[i];
204       Instruction *InsertPos = InsertPts[i];
205       // Get LiveIn value into the ExitBlock. If there are multiple
206       // predecessors, the value is defined by a PHI node in this
207       // block.
208       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
209       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
210       Type *Ty = LiveInValue->getType();
211       IRBuilder<> Builder(InsertPos);
212       if (AtomicCounterUpdatePromoted)
        // An atomic update can currently only be promoted across the current
        // loop, not the whole loop nest.
215         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
216                                 MaybeAlign(),
217                                 AtomicOrdering::SequentiallyConsistent);
218       else {
219         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
220         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
221         auto *NewStore = Builder.CreateStore(NewVal, Addr);
222 
223         // Now update the parent loop's candidate list:
224         if (IterativeCounterPromotion) {
225           auto *TargetLoop = LI.getLoopFor(ExitBlock);
226           if (TargetLoop)
227             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
228         }
229       }
230     }
231   }
232 
233 private:
234   Instruction *Store;
235   ArrayRef<BasicBlock *> ExitBlocks;
236   ArrayRef<Instruction *> InsertPts;
237   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
238   LoopInfo &LI;
239 };
240 
241 /// A helper class to do register promotion for all profile counter
242 /// updates in a loop.
243 ///
244 class PGOCounterPromoter {
245 public:
246   PGOCounterPromoter(
247       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
248       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
249       : LoopToCandidates(LoopToCands), L(CurLoop), LI(LI), BFI(BFI) {
250 
251     // Skip collection of ExitBlocks and InsertPts for loops that will not be
252     // able to have counters promoted.
253     SmallVector<BasicBlock *, 8> LoopExitBlocks;
254     SmallPtrSet<BasicBlock *, 8> BlockSet;
255 
256     L.getExitBlocks(LoopExitBlocks);
257     if (!isPromotionPossible(&L, LoopExitBlocks))
258       return;
259 
260     for (BasicBlock *ExitBlock : LoopExitBlocks) {
261       if (BlockSet.insert(ExitBlock).second) {
262         ExitBlocks.push_back(ExitBlock);
263         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
264       }
265     }
266   }
267 
268   bool run(int64_t *NumPromoted) {
269     // Skip 'infinite' loops:
270     if (ExitBlocks.size() == 0)
271       return false;
272 
273     // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of an incomplete profile -- if the
    // loop is a long-running loop and dump is called in the middle
    // of the loop, the resulting profile is incomplete.
277     // FIXME: add other heuristics to detect long running loops.
278     if (SkipRetExitBlock) {
279       for (auto BB : ExitBlocks)
280         if (isa<ReturnInst>(BB->getTerminator()))
281           return false;
282     }
283 
284     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
285     if (MaxProm == 0)
286       return false;
287 
288     unsigned Promoted = 0;
289     for (auto &Cand : LoopToCandidates[&L]) {
290 
291       SmallVector<PHINode *, 4> NewPHIs;
292       SSAUpdater SSA(&NewPHIs);
293       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
294 
295       // If BFI is set, we will use it to guide the promotions.
296       if (BFI) {
297         auto *BB = Cand.first->getParent();
298         auto InstrCount = BFI->getBlockProfileCount(BB);
299         if (!InstrCount)
300           continue;
301         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
302         // If the average loop trip count is not greater than 1.5, we skip
303         // promotion.
304         if (PreheaderCount &&
305             (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
306           continue;
307       }
308 
309       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
310                                         L.getLoopPreheader(), ExitBlocks,
311                                         InsertPts, LoopToCandidates, LI);
312       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
313       Promoted++;
314       if (Promoted >= MaxProm)
315         break;
316 
317       (*NumPromoted)++;
318       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
319         break;
320     }
321 
322     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
323                       << L.getLoopDepth() << ")\n");
324     return Promoted != 0;
325   }
326 
327 private:
328   bool allowSpeculativeCounterPromotion(Loop *LP) {
329     SmallVector<BasicBlock *, 8> ExitingBlocks;
330     L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
332     if (ExitingBlocks.size() == 1)
333       return true;
334     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
335       return false;
336     return true;
337   }
338 
  // Check whether the loop satisfies the basic conditions needed to perform
  // counter promotion.
341   bool
342   isPromotionPossible(Loop *LP,
343                       const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
344     // We can't insert into a catchswitch.
345     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
346           return isa<CatchSwitchInst>(Exit->getTerminator());
347         }))
348       return false;
349 
350     if (!LP->hasDedicatedExits())
351       return false;
352 
353     BasicBlock *PH = LP->getLoopPreheader();
354     if (!PH)
355       return false;
356 
357     return true;
358   }
359 
  // Returns the max number of counter promotions for LP.
361   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
362     SmallVector<BasicBlock *, 8> LoopExitBlocks;
363     LP->getExitBlocks(LoopExitBlocks);
364     if (!isPromotionPossible(LP, LoopExitBlocks))
365       return 0;
366 
367     SmallVector<BasicBlock *, 8> ExitingBlocks;
368     LP->getExitingBlocks(ExitingBlocks);
369 
370     // If BFI is set, we do more aggressive promotions based on BFI.
371     if (BFI)
372       return (unsigned)-1;
373 
    // Not considered speculative.
375     if (ExitingBlocks.size() == 1)
376       return MaxNumOfPromotionsPerLoop;
377 
378     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
379       return 0;
380 
381     // Whether the target block is in a loop does not matter:
382     if (SpeculativeCounterPromotionToLoop)
383       return MaxNumOfPromotionsPerLoop;
384 
385     // Now check the target block:
386     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
387     for (auto *TargetBlock : LoopExitBlocks) {
388       auto *TargetLoop = LI.getLoopFor(TargetBlock);
389       if (!TargetLoop)
390         continue;
391       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
392       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
393       MaxProm =
394           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
395                                 PendingCandsInTarget);
396     }
397     return MaxProm;
398   }
399 
400   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
401   SmallVector<BasicBlock *, 8> ExitBlocks;
402   SmallVector<Instruction *, 8> InsertPts;
403   Loop &L;
404   LoopInfo &LI;
405   BlockFrequencyInfo *BFI;
406 };
407 
408 enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
410   // profiling.
411   Default,
412 
413   // MemOp: the memop size value profiling.
414   MemOp
415 };
416 
417 } // end anonymous namespace
418 
419 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
420   FunctionAnalysisManager &FAM =
421       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
422   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
423     return FAM.getResult<TargetLibraryAnalysis>(F);
424   };
425   if (!run(M, GetTLI))
426     return PreservedAnalyses::all();
427 
428   return PreservedAnalyses::none();
429 }
430 
431 char InstrProfilingLegacyPass::ID = 0;
432 INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof",
433                       "Frontend instrumentation-based coverage lowering.",
434                       false, false)
435 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
436 INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof",
437                     "Frontend instrumentation-based coverage lowering.", false,
438                     false)
439 
440 ModulePass *
441 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
442                                      bool IsCS) {
443   return new InstrProfilingLegacyPass(Options, IsCS);
444 }
445 
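/// Lower all profiling intrinsics in the function and, when counter promotion
/// is enabled, promote the resulting counter load/store pairs out of loops.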
446 bool InstrProfiling::lowerIntrinsics(Function *F) {
447   bool MadeChange = false;
448   PromotionCandidates.clear();
449   for (BasicBlock &BB : *F) {
450     for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
451       if (auto *IPIS = dyn_cast<InstrProfIncrementInstStep>(&Instr)) {
452         lowerIncrement(IPIS);
453         MadeChange = true;
454       } else if (auto *IPI = dyn_cast<InstrProfIncrementInst>(&Instr)) {
455         lowerIncrement(IPI);
456         MadeChange = true;
457       } else if (auto *IPC = dyn_cast<InstrProfCoverInst>(&Instr)) {
458         lowerCover(IPC);
459         MadeChange = true;
460       } else if (auto *IPVP = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
461         lowerValueProfileInst(IPVP);
462         MadeChange = true;
463       }
464     }
465   }
466 
467   if (!MadeChange)
468     return false;
469 
470   promoteCounterLoadStores(F);
471   return true;
472 }
473 
474 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // Mach-O doesn't support weak external references.
476   if (TT.isOSBinFormatMachO())
477     return false;
478 
479   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
480     return RuntimeCounterRelocation;
481 
482   // Fuchsia uses runtime counter relocation by default.
483   return TT.isOSFuchsia();
484 }
485 
486 bool InstrProfiling::isCounterPromotionEnabled() const {
487   if (DoCounterPromotion.getNumOccurrences() > 0)
488     return DoCounterPromotion;
489 
490   return Options.DoCounterPromotion;
491 }
492 
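/// Promote the counter load/store pairs recorded during intrinsic lowering
/// out of the loops that contain them.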
493 void InstrProfiling::promoteCounterLoadStores(Function *F) {
494   if (!isCounterPromotionEnabled())
495     return;
496 
497   DominatorTree DT(*F);
498   LoopInfo LI(DT);
499   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
500 
501   std::unique_ptr<BlockFrequencyInfo> BFI;
502   if (Options.UseBFIInPromotion) {
503     std::unique_ptr<BranchProbabilityInfo> BPI;
504     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
505     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
506   }
507 
508   for (const auto &LoadStore : PromotionCandidates) {
509     auto *CounterLoad = LoadStore.first;
510     auto *CounterStore = LoadStore.second;
511     BasicBlock *BB = CounterLoad->getParent();
512     Loop *ParentLoop = LI.getLoopFor(BB);
513     if (!ParentLoop)
514       continue;
515     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
516   }
517 
518   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
519 
520   // Do a post-order traversal of the loops so that counter updates can be
521   // iteratively hoisted outside the loop nest.
522   for (auto *Loop : llvm::reverse(Loops)) {
523     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
524     Promoter.run(&TotalCountersPromoted);
525   }
526 }
527 
528 static bool needsRuntimeHookUnconditionally(const Triple &TT) {
  // On Fuchsia, we only need the runtime hook if any counters are present.
530   if (TT.isOSFuchsia())
531     return false;
532 
533   return true;
534 }
535 
536 /// Check if the module contains uses of any profiling intrinsics.
537 static bool containsProfilingIntrinsics(Module &M) {
538   auto containsIntrinsic = [&](int ID) {
539     if (auto *F = M.getFunction(Intrinsic::getName(ID)))
540       return !F->use_empty();
541     return false;
542   };
543   return containsIntrinsic(llvm::Intrinsic::instrprof_cover) ||
544          containsIntrinsic(llvm::Intrinsic::instrprof_increment) ||
545          containsIntrinsic(llvm::Intrinsic::instrprof_increment_step) ||
546          containsIntrinsic(llvm::Intrinsic::instrprof_value_profile);
547 }
548 
549 bool InstrProfiling::run(
550     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
551   this->M = &M;
552   this->GetTLI = std::move(GetTLI);
553   NamesVar = nullptr;
554   NamesSize = 0;
555   ProfileDataMap.clear();
556   CompilerUsedVars.clear();
557   UsedVars.clear();
558   TT = Triple(M.getTargetTriple());
559 
560   bool MadeChange = false;
561 
562   // Emit the runtime hook even if no counters are present.
563   if (needsRuntimeHookUnconditionally(TT))
564     MadeChange = emitRuntimeHook();
565 
566   // Improve compile time by avoiding linear scans when there is no work.
567   GlobalVariable *CoverageNamesVar =
568       M.getNamedGlobal(getCoverageUnusedNamesVarName());
569   if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
570     return MadeChange;
571 
  // The frontend does not know how many value sites there will be inside an
  // instrumented function, so count the instrumented value sites here and
  // record the number as a field in the profile data variable.
575   for (Function &F : M) {
576     InstrProfIncrementInst *FirstProfIncInst = nullptr;
577     for (BasicBlock &BB : F)
578       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
579         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
580           computeNumValueSiteCounts(Ind);
581         else if (FirstProfIncInst == nullptr)
582           FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);
583 
    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
586     if (FirstProfIncInst != nullptr)
587       static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
588   }
589 
590   for (Function &F : M)
591     MadeChange |= lowerIntrinsics(&F);
592 
593   if (CoverageNamesVar) {
594     lowerCoverageData(CoverageNamesVar);
595     MadeChange = true;
596   }
597 
598   if (!MadeChange)
599     return false;
600 
601   emitVNodes();
602   emitNameData();
603   emitRuntimeHook();
604   emitRegistration();
605   emitUses();
606   emitInitialization();
607   return true;
608 }
609 
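/// Get or insert the declaration of the runtime function that value profiling
/// intrinsics are lowered to; its parameter types are listed in
/// llvm/ProfileData/InstrProfData.inc.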
610 static FunctionCallee getOrInsertValueProfilingCall(
611     Module &M, const TargetLibraryInfo &TLI,
612     ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
613   LLVMContext &Ctx = M.getContext();
614   auto *ReturnTy = Type::getVoidTy(M.getContext());
615 
616   AttributeList AL;
617   if (auto AK = TLI.getExtAttrForI32Param(false))
618     AL = AL.addParamAttribute(M.getContext(), 2, AK);
619 
620   assert((CallType == ValueProfilingCallType::Default ||
621           CallType == ValueProfilingCallType::MemOp) &&
622          "Must be Default or MemOp");
623   Type *ParamTypes[] = {
624 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
625 #include "llvm/ProfileData/InstrProfData.inc"
626   };
627   auto *ValueProfilingCallTy =
628       FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
629   StringRef FuncName = CallType == ValueProfilingCallType::Default
630                            ? getInstrProfValueProfFuncName()
631                            : getInstrProfValueProfMemOpFuncName();
632   return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
633 }
634 
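/// Record, per value kind, how many value sites the instrumented function
/// has, based on the largest site index seen for that kind.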
635 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
636   GlobalVariable *Name = Ind->getName();
637   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
638   uint64_t Index = Ind->getIndex()->getZExtValue();
639   auto &PD = ProfileDataMap[Name];
640   PD.NumValueSites[ValueKind] =
641       std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
642 }
643 
644 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
645   // TODO: Value profiling heavily depends on the data section which is omitted
646   // in lightweight mode. We need to move the value profile pointer to the
647   // Counter struct to get this working.
648   assert(
649       !DebugInfoCorrelate &&
650       "Value profiling is not yet supported with lightweight instrumentation");
651   GlobalVariable *Name = Ind->getName();
652   auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");
655 
656   GlobalVariable *DataVar = It->second.DataVar;
657   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
658   uint64_t Index = Ind->getIndex()->getZExtValue();
659   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
660     Index += It->second.NumValueSites[Kind];
661 
662   IRBuilder<> Builder(Ind);
663   bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
664                       llvm::InstrProfValueKind::IPVK_MemOPSize);
665   CallInst *Call = nullptr;
666   auto *TLI = &GetTLI(*Ind->getFunction());
667 
668   // To support value profiling calls within Windows exception handlers, funclet
669   // information contained within operand bundles needs to be copied over to
670   // the library call. This is required for the IR to be processed by the
671   // WinEHPrepare pass.
672   SmallVector<OperandBundleDef, 1> OpBundles;
673   Ind->getOperandBundlesAsDefs(OpBundles);
674   if (!IsMemOpSize) {
675     Value *Args[3] = {Ind->getTargetValue(),
676                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
677                       Builder.getInt32(Index)};
678     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
679                               OpBundles);
680   } else {
681     Value *Args[3] = {Ind->getTargetValue(),
682                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
683                       Builder.getInt32(Index)};
684     Call = Builder.CreateCall(
685         getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
686         Args, OpBundles);
687   }
688   if (auto AK = TLI->getExtAttrForI32Param(false))
689     Call->addParamAttr(2, AK);
690   Ind->replaceAllUsesWith(Call);
691   Ind->eraseFromParent();
692 }
693 
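/// Compute the address of the counter indexed by the intrinsic. When runtime
/// counter relocation is enabled, the address is additionally offset by a
/// per-function load of the counter bias variable.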
694 Value *InstrProfiling::getCounterAddress(InstrProfInstBase *I) {
695   auto *Counters = getOrCreateRegionCounters(I);
696   IRBuilder<> Builder(I);
697 
698   auto *Addr = Builder.CreateConstInBoundsGEP2_32(
699       Counters->getValueType(), Counters, 0, I->getIndex()->getZExtValue());
700 
701   if (!isRuntimeCounterRelocationEnabled())
702     return Addr;
703 
704   Type *Int64Ty = Type::getInt64Ty(M->getContext());
705   Function *Fn = I->getParent()->getParent();
706   LoadInst *&BiasLI = FunctionToProfileBiasMap[Fn];
707   if (!BiasLI) {
708     IRBuilder<> EntryBuilder(&Fn->getEntryBlock().front());
709     auto *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
710     if (!Bias) {
      // The compiler must define this variable when runtime counter relocation
      // is being used. The runtime has a weak external reference that is used
      // to check whether that's the case or not.
714       Bias = new GlobalVariable(
715           *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
716           Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
717       Bias->setVisibility(GlobalVariable::HiddenVisibility);
718       // A definition that's weak (linkonce_odr) without being in a COMDAT
719       // section wouldn't lead to link errors, but it would lead to a dead
720       // data word from every TU but one. Putting it in COMDAT ensures there
721       // will be exactly one data slot in the link.
722       if (TT.supportsCOMDAT())
723         Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
724     }
725     BiasLI = EntryBuilder.CreateLoad(Int64Ty, Bias);
726   }
727   auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), BiasLI);
728   return Builder.CreateIntToPtr(Add, Addr->getType());
729 }
730 
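/// Lower llvm.instrprof.cover by storing zero into the corresponding byte of
/// the coverage counter array.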
731 void InstrProfiling::lowerCover(InstrProfCoverInst *CoverInstruction) {
732   auto *Addr = getCounterAddress(CoverInstruction);
733   IRBuilder<> Builder(CoverInstruction);
734   // We store zero to represent that this block is covered.
735   Builder.CreateStore(Builder.getInt8(0), Addr);
736   CoverInstruction->eraseFromParent();
737 }
738 
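/// Lower llvm.instrprof.increment (and the step variant) to either an atomic
/// add or a load/add/store sequence on the counter; when counter promotion is
/// enabled, non-atomic updates are recorded as promotion candidates.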
739 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
740   auto *Addr = getCounterAddress(Inc);
741 
742   IRBuilder<> Builder(Inc);
743   if (Options.Atomic || AtomicCounterUpdateAll ||
744       (Inc->getIndex()->isZeroValue() && AtomicFirstCounter)) {
745     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
746                             MaybeAlign(), AtomicOrdering::Monotonic);
747   } else {
748     Value *IncStep = Inc->getStep();
749     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
750     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
751     auto *Store = Builder.CreateStore(Count, Addr);
752     if (isCounterPromotionEnabled())
753       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
754   }
755   Inc->eraseFromParent();
756 }
757 
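/// Collect the function name variables referenced by the unused coverage
/// names array into ReferencedNames, then delete the array.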
758 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
759   ConstantArray *Names =
760       cast<ConstantArray>(CoverageNamesVar->getInitializer());
761   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
762     Constant *NC = Names->getOperand(I);
763     Value *V = NC->stripPointerCasts();
764     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
765     GlobalVariable *Name = cast<GlobalVariable>(V);
766 
767     Name->setLinkage(GlobalValue::PrivateLinkage);
768     ReferencedNames.push_back(Name);
769     if (isa<ConstantExpr>(NC))
770       NC->dropAllReferences();
771   }
772   CoverageNamesVar->eraseFromParent();
773 }
774 
775 /// Get the name of a profiling variable for a particular function.
776 static std::string getVarName(InstrProfInstBase *Inc, StringRef Prefix,
777                               bool &Renamed) {
778   StringRef NamePrefix = getInstrProfNameVarPrefix();
779   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
780   Function *F = Inc->getParent()->getParent();
781   Module *M = F->getParent();
782   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
783       !canRenameComdatFunc(*F)) {
784     Renamed = false;
785     return (Prefix + Name).str();
786   }
787   Renamed = true;
788   uint64_t FuncHash = Inc->getHash()->getZExtValue();
789   SmallVector<char, 24> HashPostfix;
790   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
791     return (Prefix + Name).str();
792   return (Prefix + Name + "." + Twine(FuncHash)).str();
793 }
794 
795 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
796   auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
797   if (!MD)
798     return 0;
799 
800   // If the flag is a ConstantAsMetadata, it should be an integer representable
801   // in 64-bits.
802   return cast<ConstantInt>(MD->getValue())->getZExtValue();
803 }
804 
805 static bool enablesValueProfiling(const Module &M) {
806   return isIRPGOFlagSet(&M) ||
807          getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
808 }
809 
810 // Conservatively returns true if data variables may be referenced by code.
811 static bool profDataReferencedByCode(const Module &M) {
812   return enablesValueProfiling(M);
813 }
814 
815 static inline bool shouldRecordFunctionAddr(Function *F) {
816   // Only record function addresses if IR PGO is enabled or if clang value
817   // profiling is enabled. Recording function addresses greatly increases object
818   // file size, because it prevents the inliner from deleting functions that
819   // have been inlined everywhere.
820   if (!profDataReferencedByCode(*F->getParent()))
821     return false;
822 
823   // Check the linkage
824   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
825   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
826       !HasAvailableExternallyLinkage)
827     return true;
828 
829   // A function marked 'alwaysinline' with available_externally linkage can't
830   // have its address taken. Doing so would create an undefined external ref to
831   // the function, which would fail to link.
832   if (HasAvailableExternallyLinkage &&
833       F->hasFnAttribute(Attribute::AlwaysInline))
834     return false;
835 
836   // Prohibit function address recording if the function is both internal and
837   // COMDAT. This avoids the profile data variable referencing internal symbols
838   // in COMDAT.
839   if (F->hasLocalLinkage() && F->hasComdat())
840     return false;
841 
842   // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkonce_odr linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with the missing address may be picked by the linker, leading to missing
  // indirect call target info.
849   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
850 }
851 
852 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin; compiler-rt uses linker magic.
854   if (TT.isOSDarwin())
855     return false;
856   // Use linker script magic to get data/cnts/name start/end.
857   if (TT.isOSAIX() || TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
858       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4() || TT.isOSWindows())
859     return false;
860 
861   return true;
862 }
863 
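/// Create the counter variable for the intrinsic: an i8 array initialized to
/// all ones for llvm.instrprof.cover, or a zero-initialized i64 array for
/// regular counters.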
864 GlobalVariable *
865 InstrProfiling::createRegionCounters(InstrProfInstBase *Inc, StringRef Name,
866                                      GlobalValue::LinkageTypes Linkage) {
867   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
868   auto &Ctx = M->getContext();
869   GlobalVariable *GV;
870   if (isa<InstrProfCoverInst>(Inc)) {
871     auto *CounterTy = Type::getInt8Ty(Ctx);
872     auto *CounterArrTy = ArrayType::get(CounterTy, NumCounters);
873     // TODO: `Constant::getAllOnesValue()` does not yet accept an array type.
874     std::vector<Constant *> InitialValues(NumCounters,
875                                           Constant::getAllOnesValue(CounterTy));
876     GV = new GlobalVariable(*M, CounterArrTy, false, Linkage,
877                             ConstantArray::get(CounterArrTy, InitialValues),
878                             Name);
879     GV->setAlignment(Align(1));
880   } else {
881     auto *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
882     GV = new GlobalVariable(*M, CounterTy, false, Linkage,
883                             Constant::getNullValue(CounterTy), Name);
884     GV->setAlignment(Align(8));
885   }
886   return GV;
887 }
888 
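/// Create (or return the cached) counter variable for the function containing
/// the intrinsic, along with its value profile array and profile data
/// variable, choosing linkage, visibility, section and comdat placement based
/// on the target object format.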
889 GlobalVariable *
890 InstrProfiling::getOrCreateRegionCounters(InstrProfInstBase *Inc) {
891   GlobalVariable *NamePtr = Inc->getName();
892   auto &PD = ProfileDataMap[NamePtr];
893   if (PD.RegionCounters)
894     return PD.RegionCounters;
895 
896   // Match the linkage and visibility of the name global.
897   Function *Fn = Inc->getParent()->getParent();
898   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
899   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
900 
901   // Use internal rather than private linkage so the counter variable shows up
902   // in the symbol table when using debug info for correlation.
903   if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
904       Linkage == GlobalValue::PrivateLinkage)
905     Linkage = GlobalValue::InternalLinkage;
906 
  // Due to a limitation of the binder as of 2021/09/28, duplicate weak
  // symbols in the same csect won't be discarded. When there are duplicate
  // weak symbols, we cannot guarantee that the relocations get resolved to
  // the intended weak symbol, and so we cannot ensure the correctness of the
  // relative CounterPtr; therefore we have to use private linkage for counter
  // and data symbols.
912   if (TT.isOSBinFormatXCOFF()) {
913     Linkage = GlobalValue::PrivateLinkage;
914     Visibility = GlobalValue::DefaultVisibility;
915   }
  // Move the name variable to the right section. Place the counter and data
  // variables in a COMDAT group if the associated function is a COMDAT. This
  // makes sure that only one copy of the COMDAT function's counters is emitted
  // after linking. Keep in mind that this pass may run before the inliner, so
  // we need to create a new comdat group for the counters and profiling data.
  // If we used the comdat of the parent function, that would result in
  // relocations against discarded sections.
923   //
  // If the data variable is referenced by code, counters and data have to be
925   // in different comdats for COFF because the Visual C++ linker will report
926   // duplicate symbol errors if there are multiple external symbols with the
927   // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
928   //
929   // For ELF, when not using COMDAT, put counters, data and values into a
930   // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
931   // allows -z start-stop-gc to discard the entire group when the function is
932   // discarded.
933   bool DataReferencedByCode = profDataReferencedByCode(*M);
934   bool NeedComdat = needsComdatForCounter(*Fn, *M);
935   bool Renamed;
936   std::string CntsVarName =
937       getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
938   std::string DataVarName =
939       getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
940   auto MaybeSetComdat = [&](GlobalVariable *GV) {
941     bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
942     if (UseComdat) {
943       StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
944                                 ? GV->getName()
945                                 : CntsVarName;
946       Comdat *C = M->getOrInsertComdat(GroupName);
947       if (!NeedComdat)
948         C->setSelectionKind(Comdat::NoDeduplicate);
949       GV->setComdat(C);
950     }
951   };
952 
953   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
954   LLVMContext &Ctx = M->getContext();
955 
956   auto *CounterPtr = createRegionCounters(Inc, CntsVarName, Linkage);
957   CounterPtr->setVisibility(Visibility);
958   CounterPtr->setSection(
959       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
960   MaybeSetComdat(CounterPtr);
961   CounterPtr->setLinkage(Linkage);
962   PD.RegionCounters = CounterPtr;
963   if (DebugInfoCorrelate) {
964     if (auto *SP = Fn->getSubprogram()) {
965       DIBuilder DB(*M, true, SP->getUnit());
966       Metadata *FunctionNameAnnotation[] = {
967           MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
968           MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
969       };
970       Metadata *CFGHashAnnotation[] = {
971           MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
972           ConstantAsMetadata::get(Inc->getHash()),
973       };
974       Metadata *NumCountersAnnotation[] = {
975           MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
976           ConstantAsMetadata::get(Inc->getNumCounters()),
977       };
978       auto Annotations = DB.getOrCreateArray({
979           MDNode::get(Ctx, FunctionNameAnnotation),
980           MDNode::get(Ctx, CFGHashAnnotation),
981           MDNode::get(Ctx, NumCountersAnnotation),
982       });
983       auto *DICounter = DB.createGlobalVariableExpression(
984           SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
985           /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
986           CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
987           /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
988           Annotations);
989       CounterPtr->addDebugInfo(DICounter);
990       DB.finalize();
991     } else {
992       std::string Msg = ("Missing debug info for function " + Fn->getName() +
993                          "; required for profile correlation.")
994                             .str();
995       Ctx.diagnose(
996           DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
997     }
998   }
999 
1000   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
1001   // Allocate statically the array of pointers to value profile nodes for
1002   // the current function.
1003   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
1004   uint64_t NS = 0;
1005   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1006     NS += PD.NumValueSites[Kind];
1007   if (NS > 0 && ValueProfileStaticAlloc &&
1008       !needsRuntimeRegistrationOfSectionRange(TT)) {
1009     ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
1010     auto *ValuesVar = new GlobalVariable(
1011         *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
1012         getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
1013     ValuesVar->setVisibility(Visibility);
1014     ValuesVar->setSection(
1015         getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
1016     ValuesVar->setAlignment(Align(8));
1017     MaybeSetComdat(ValuesVar);
1018     ValuesPtrExpr =
1019         ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
1020   }
1021 
1022   if (DebugInfoCorrelate) {
1023     // Mark the counter variable as used so that it isn't optimized out.
1024     CompilerUsedVars.push_back(PD.RegionCounters);
1025     return PD.RegionCounters;
1026   }
1027 
1028   // Create data variable.
1029   auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
1030   auto *Int16Ty = Type::getInt16Ty(Ctx);
1031   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
1032   Type *DataTypes[] = {
1033 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
1034 #include "llvm/ProfileData/InstrProfData.inc"
1035   };
1036   auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));
1037 
1038   Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
1039                                ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
1040                                : ConstantPointerNull::get(Int8PtrTy);
1041 
1042   Constant *Int16ArrayVals[IPVK_Last + 1];
1043   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1044     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
1045 
1046   // If the data variable is not referenced by code (if we don't emit
1047   // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
1048   // data variable live under linker GC, the data variable can be private. This
1049   // optimization applies to ELF.
1050   //
1051   // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
1052   // to be false.
1053   //
1054   // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
1055   // that other copies must have the same CFG and cannot have value profiling.
1056   // If no hash suffix, other profd copies may be referenced by code.
1057   if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
1058       (TT.isOSBinFormatELF() ||
1059        (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
1060     Linkage = GlobalValue::PrivateLinkage;
1061     Visibility = GlobalValue::DefaultVisibility;
1062   }
1063   auto *Data =
1064       new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
1065   // Reference the counter variable with a label difference (link-time
1066   // constant).
1067   auto *RelativeCounterPtr =
1068       ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
1069                            ConstantExpr::getPtrToInt(Data, IntPtrTy));
1070 
1071   Constant *DataVals[] = {
1072 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
1073 #include "llvm/ProfileData/InstrProfData.inc"
1074   };
1075   Data->setInitializer(ConstantStruct::get(DataTy, DataVals));
1076 
1077   Data->setVisibility(Visibility);
1078   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
1079   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
1080   MaybeSetComdat(Data);
1081   Data->setLinkage(Linkage);
1082 
1083   PD.DataVar = Data;
1084 
1085   // Mark the data variable as used so that it isn't stripped out.
1086   CompilerUsedVars.push_back(Data);
1087   // Now that the linkage set by the FE has been passed to the data and counter
1088   // variables, reset Name variable's linkage and visibility to private so that
1089   // it can be removed later by the compiler.
1090   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
1091   // Collect the referenced names to be used by emitNameData.
1092   ReferencedNames.push_back(NamePtr);
1093 
1094   return PD.RegionCounters;
1095 }
1096 
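/// Statically allocate the array of value profile nodes shared by all value
/// sites in the module; its size is derived from the total number of value
/// sites and the -vp-counters-per-site option.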
1097 void InstrProfiling::emitVNodes() {
1098   if (!ValueProfileStaticAlloc)
1099     return;
1100 
1101   // For now only support this on platforms that do
1102   // not require runtime registration to discover
1103   // named section start/end.
1104   if (needsRuntimeRegistrationOfSectionRange(TT))
1105     return;
1106 
1107   size_t TotalNS = 0;
1108   for (auto &PD : ProfileDataMap) {
1109     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
1110       TotalNS += PD.second.NumValueSites[Kind];
1111   }
1112 
1113   if (!TotalNS)
1114     return;
1115 
1116   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
1117 // Heuristic for small programs with very few total value sites.
1118 // The default value of vp-counters-per-site is chosen based on
1119 // the observation that large apps usually have a low percentage
1120 // of value sites that actually have any profile data, and thus
1121 // the average number of counters per site is low. For small
1122 // apps with very few sites, this may not be true. Bump up the
1123 // number of counters in this case.
1124 #define INSTR_PROF_MIN_VAL_COUNTS 10
1125   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
1126     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
1127 
1128   auto &Ctx = M->getContext();
1129   Type *VNodeTypes[] = {
1130 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
1131 #include "llvm/ProfileData/InstrProfData.inc"
1132   };
1133   auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));
1134 
1135   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
1136   auto *VNodesVar = new GlobalVariable(
1137       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
1138       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
1139   VNodesVar->setSection(
1140       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
1141   // VNodesVar is used by runtime but not referenced via relocation by other
1142   // sections. Conservatively make it linker retained.
1143   UsedVars.push_back(VNodesVar);
1144 }
1145 
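/// Concatenate all referenced function names (optionally compressed) into a
/// single variable placed in the names section.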
1146 void InstrProfiling::emitNameData() {
1147   std::string UncompressedData;
1148 
1149   if (ReferencedNames.empty())
1150     return;
1151 
1152   std::string CompressedNameStr;
1153   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
1154                                           DoInstrProfNameCompression)) {
1155     report_fatal_error(Twine(toString(std::move(E))), false);
1156   }
1157 
1158   auto &Ctx = M->getContext();
1159   auto *NamesVal =
1160       ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
1161   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
1162                                 GlobalValue::PrivateLinkage, NamesVal,
1163                                 getInstrProfNamesVarName());
1164   NamesSize = CompressedNameStr.size();
1165   NamesVar->setSection(
1166       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
1167   // On COFF, it's important to reduce the alignment down to 1 to prevent the
1168   // linker from inserting padding before the start of the names section or
1169   // between names entries.
1170   NamesVar->setAlignment(Align(1));
1171   // NamesVar is used by runtime but not referenced via relocation by other
1172   // sections. Conservatively make it linker retained.
1173   UsedVars.push_back(NamesVar);
1174 
1175   for (auto *NamePtr : ReferencedNames)
1176     NamePtr->eraseFromParent();
1177 }
1178 
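/// Emit a registration function that passes the profile data variables and
/// the names variable to the runtime registration routines, for targets that
/// require runtime registration of the profile sections.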
1179 void InstrProfiling::emitRegistration() {
1180   if (!needsRuntimeRegistrationOfSectionRange(TT))
1181     return;
1182 
1183   // Construct the function.
1184   auto *VoidTy = Type::getVoidTy(M->getContext());
1185   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1186   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1187   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1188   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1189                                      getInstrProfRegFuncsName(), M);
1190   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1191   if (Options.NoRedZone)
1192     RegisterF->addFnAttr(Attribute::NoRedZone);
1193 
1194   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1195   auto *RuntimeRegisterF =
1196       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1197                        getInstrProfRegFuncName(), M);
1198 
1199   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1200   for (Value *Data : CompilerUsedVars)
1201     if (!isa<Function>(Data))
1202       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1203   for (Value *Data : UsedVars)
1204     if (Data != NamesVar && !isa<Function>(Data))
1205       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1206 
1207   if (NamesVar) {
1208     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1209     auto *NamesRegisterTy =
1210         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
1211     auto *NamesRegisterF =
1212         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1213                          getInstrProfNamesRegFuncName(), M);
1214     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1215                                     IRB.getInt64(NamesSize)});
1216   }
1217 
1218   IRB.CreateRetVoid();
1219 }
1220 
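/// Emit the external runtime hook variable (and, on non-ELF targets, a
/// function that uses it) to pull in the profile runtime at link time.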
1221 bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
  // in which case there is no need to emit the external variable.
1224   if (TT.isOSLinux())
1225     return false;
1226 
  // If the module has already provided its own runtime, we don't need to do
  // anything.
1228   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1229     return false;
1230 
1231   // Declare an external variable that will pull in the runtime initialization.
1232   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1233   auto *Var =
1234       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1235                          nullptr, getInstrProfRuntimeHookVarName());
1236 
1237   if (TT.isOSBinFormatELF()) {
1238     // Mark the user variable as used so that it isn't stripped out.
1239     CompilerUsedVars.push_back(Var);
1240   } else {
1241     // Make a function that uses it.
1242     auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1243                                   GlobalValue::LinkOnceODRLinkage,
1244                                   getInstrProfRuntimeHookVarUseFuncName(), M);
1245     User->addFnAttr(Attribute::NoInline);
1246     if (Options.NoRedZone)
1247       User->addFnAttr(Attribute::NoRedZone);
1248     User->setVisibility(GlobalValue::HiddenVisibility);
1249     if (TT.supportsCOMDAT())
1250       User->setComdat(M->getOrInsertComdat(User->getName()));
1251 
1252     IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1253     auto *Load = IRB.CreateLoad(Int32Ty, Var);
1254     IRB.CreateRet(Load);
1255 
1256     // Mark the function as used so that it isn't stripped out.
1257     CompilerUsedVars.push_back(User);
1258   }
1259   return true;
1260 }
1261 
1262 void InstrProfiling::emitUses() {
1263   // The metadata sections are parallel arrays. Optimizers (e.g.
1264   // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
1265   // we conservatively retain all unconditionally in the compiler.
1266   //
1267   // On ELF and Mach-O, the linker can guarantee the associated sections will be
1268   // retained or discarded as a unit, so llvm.compiler.used is sufficient.
1269   // Similarly on COFF, if prof data is not referenced by code we use one comdat
1270   // and ensure this GC property as well. Otherwise, we have to conservatively
1271   // make all of the sections retained by the linker.
1272   if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
1273       (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
1274     appendToCompilerUsed(*M, CompilerUsedVars);
1275   else
1276     appendToUsed(*M, CompilerUsedVars);
1277 
1278   // We do not add proper references from used metadata sections to NamesVar and
1279   // VNodesVar, so we have to be conservative and place them in llvm.used
  // regardless of the target.
1281   appendToUsed(*M, UsedVars);
1282 }
1283 
1284 void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for context-sensitive
  // instrumentation lowering: that lowering happens after LTO/ThinLTO linking,
  // and the PGOInstrumentationGenCreateVar pass should have already created
  // the variable before LTO/ThinLTO linking.
1289   if (!IsCS)
1290     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1291   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1292   if (!RegisterF)
1293     return;
1294 
1295   // Create the initialization function.
1296   auto *VoidTy = Type::getVoidTy(M->getContext());
1297   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1298                              GlobalValue::InternalLinkage,
1299                              getInstrProfInitFuncName(), M);
1300   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1301   F->addFnAttr(Attribute::NoInline);
1302   if (Options.NoRedZone)
1303     F->addFnAttr(Attribute::NoRedZone);
1304 
1305   // Add the basic block and the necessary calls.
1306   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1307   IRB.CreateCall(RegisterF, {});
1308   IRB.CreateRetVoid();
1309 
1310   appendToGlobalCtors(*M, F, 0);
1311 }
1312