1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
10 // It also builds the data structures and initialization code needed for
11 // updating execution counts and emitting the profile at runtime.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h"
16 #include "llvm/ADT/ArrayRef.h"
17 #include "llvm/ADT/SmallVector.h"
18 #include "llvm/ADT/StringRef.h"
19 #include "llvm/ADT/Triple.h"
20 #include "llvm/ADT/Twine.h"
21 #include "llvm/Analysis/BlockFrequencyInfo.h"
22 #include "llvm/Analysis/BranchProbabilityInfo.h"
23 #include "llvm/Analysis/LoopInfo.h"
24 #include "llvm/Analysis/TargetLibraryInfo.h"
25 #include "llvm/IR/Attributes.h"
26 #include "llvm/IR/BasicBlock.h"
27 #include "llvm/IR/Constant.h"
28 #include "llvm/IR/Constants.h"
29 #include "llvm/IR/DerivedTypes.h"
30 #include "llvm/IR/Dominators.h"
31 #include "llvm/IR/Function.h"
32 #include "llvm/IR/GlobalValue.h"
33 #include "llvm/IR/GlobalVariable.h"
34 #include "llvm/IR/IRBuilder.h"
35 #include "llvm/IR/Instruction.h"
36 #include "llvm/IR/Instructions.h"
37 #include "llvm/IR/IntrinsicInst.h"
38 #include "llvm/IR/Module.h"
39 #include "llvm/IR/Type.h"
40 #include "llvm/InitializePasses.h"
41 #include "llvm/Pass.h"
42 #include "llvm/ProfileData/InstrProf.h"
43 #include "llvm/Support/Casting.h"
44 #include "llvm/Support/CommandLine.h"
45 #include "llvm/Support/Error.h"
46 #include "llvm/Support/ErrorHandling.h"
47 #include "llvm/Transforms/Utils/BasicBlockUtils.h"
48 #include "llvm/Transforms/Utils/ModuleUtils.h"
49 #include "llvm/Transforms/Utils/SSAUpdater.h"
50 #include <algorithm>
51 #include <cassert>
52 #include <cstddef>
53 #include <cstdint>
54 #include <string>
55 
56 using namespace llvm;
57 
58 #define DEBUG_TYPE "instrprof"
59 
// The start and end values of the precise value profile range for memory
// intrinsic sizes.
62 cl::opt<std::string> MemOPSizeRange(
63     "memop-size-range",
64     cl::desc("Set the range of size in memory intrinsic calls to be profiled "
65              "precisely, in a format of <start_val>:<end_val>"),
66     cl::init(""));
67 
// The value that is considered to be large in memory intrinsic size
// profiling.
69 cl::opt<unsigned> MemOPSizeLarge(
70     "memop-size-large",
    cl::desc("Set large value threshold in memory intrinsic size profiling. "
72              "Value of 0 disables the large value profiling."),
73     cl::init(8192));
74 
75 namespace {
76 
77 cl::opt<bool> DoNameCompression("enable-name-compression",
78                                 cl::desc("Enable name string compression"),
79                                 cl::init(true));
80 
81 cl::opt<bool> DoHashBasedCounterSplit(
82     "hash-based-counter-split",
83     cl::desc("Rename counter variable of a comdat function based on cfg hash"),
84     cl::init(true));
85 
86 cl::opt<bool> RuntimeCounterRelocation(
87     "runtime-counter-relocation",
88     cl::desc("Enable relocating counters at runtime."),
89     cl::init(false));
90 
91 cl::opt<bool> ValueProfileStaticAlloc(
92     "vp-static-alloc",
93     cl::desc("Do static counter allocation for value profiler"),
94     cl::init(true));
95 
96 cl::opt<double> NumCountersPerValueSite(
97     "vp-counters-per-site",
98     cl::desc("The average number of profile counters allocated "
99              "per value profiling site."),
100     // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g., 1/30.
102     // For those sites with non-zero profile, the average number of targets
103     // is usually smaller than 2.
104     cl::init(1.0));
105 
106 cl::opt<bool> AtomicCounterUpdateAll(
107     "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
108     cl::desc("Make all profile counter updates atomic (for testing only)"),
109     cl::init(false));
110 
111 cl::opt<bool> AtomicCounterUpdatePromoted(
112     "atomic-counter-update-promoted", cl::ZeroOrMore,
113     cl::desc("Do counter update using atomic fetch add "
             "for promoted counters only"),
115     cl::init(false));
116 
// If the option is not specified, the default behavior about whether
// counter promotion is done depends on how the instrumentation lowering
// pipeline is set up, i.e., the default value of true of this option
// does not mean the promotion will be done by default. Explicitly
// setting this option can override the default behavior.
122 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
123                                  cl::desc("Do counter register promotion"),
124                                  cl::init(false));
125 cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
126     cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number of counter promotions per loop to avoid"
128              " increasing register pressure too much"));
129 
130 // A debug option
131 cl::opt<int>
132     MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
133                        cl::desc("Max number of allowed counter promotions"));
134 
135 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
136     cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
137     cl::desc("The max number of exiting blocks of a loop to allow "
             "speculative counter promotion"));
139 
140 cl::opt<bool> SpeculativeCounterPromotionToLoop(
141     cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
142     cl::desc("When the option is false, if the target block is in a loop, "
143              "the promotion will be disallowed unless the promoted counter "
             "update can be further/iteratively promoted into an acyclic "
             "region."));
146 
147 cl::opt<bool> IterativeCounterPromotion(
148     cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
149     cl::desc("Allow counter promotion across the whole loop nest."));
150 
151 class InstrProfilingLegacyPass : public ModulePass {
152   InstrProfiling InstrProf;
153 
154 public:
155   static char ID;
156 
157   InstrProfilingLegacyPass() : ModulePass(ID) {}
158   InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
159       : ModulePass(ID), InstrProf(Options, IsCS) {}
160 
161   StringRef getPassName() const override {
162     return "Frontend instrumentation-based coverage lowering";
163   }
164 
165   bool runOnModule(Module &M) override {
166     auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
167       return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
168     };
169     return InstrProf.run(M, GetTLI);
170   }
171 
172   void getAnalysisUsage(AnalysisUsage &AU) const override {
173     AU.setPreservesCFG();
174     AU.addRequired<TargetLibraryInfoWrapperPass>();
175   }
176 };
177 
178 ///
179 /// A helper class to promote one counter RMW operation in the loop
180 /// into register update.
181 ///
/// The RMW update for the counter will be sunk out of the loop after
/// the transformation.
184 ///
185 class PGOCounterPromoterHelper : public LoadAndStorePromoter {
186 public:
187   PGOCounterPromoterHelper(
188       Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
189       BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
190       ArrayRef<Instruction *> InsertPts,
191       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
192       LoopInfo &LI)
193       : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
194         InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
195     assert(isa<LoadInst>(L));
196     assert(isa<StoreInst>(S));
197     SSA.AddAvailableValue(PH, Init);
198   }
199 
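  // Sink the promoted counter update back into memory at each loop exit.
  // For the non-atomic path, the IR emitted in an exit block looks roughly
  // like this (names are illustrative):
  //   %pgocount.promoted = load i64, i64* %counter.addr
  //   %new = add i64 %pgocount.promoted, %live.in
  //   store i64 %new, i64* %counter.addr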
200   void doExtraRewritesBeforeFinalDeletion() override {
201     for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
202       BasicBlock *ExitBlock = ExitBlocks[i];
203       Instruction *InsertPos = InsertPts[i];
204       // Get LiveIn value into the ExitBlock. If there are multiple
205       // predecessors, the value is defined by a PHI node in this
206       // block.
207       Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
208       Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
209       Type *Ty = LiveInValue->getType();
210       IRBuilder<> Builder(InsertPos);
211       if (AtomicCounterUpdatePromoted)
        // An atomic update currently can only be promoted across the current
213         // loop, not the whole loop nest.
214         Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
215                                 AtomicOrdering::SequentiallyConsistent);
216       else {
217         LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
218         auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
219         auto *NewStore = Builder.CreateStore(NewVal, Addr);
220 
221         // Now update the parent loop's candidate list:
222         if (IterativeCounterPromotion) {
223           auto *TargetLoop = LI.getLoopFor(ExitBlock);
224           if (TargetLoop)
225             LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
226         }
227       }
228     }
229   }
230 
231 private:
232   Instruction *Store;
233   ArrayRef<BasicBlock *> ExitBlocks;
234   ArrayRef<Instruction *> InsertPts;
235   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
236   LoopInfo &LI;
237 };
238 
239 /// A helper class to do register promotion for all profile counter
240 /// updates in a loop.
241 ///
242 class PGOCounterPromoter {
243 public:
244   PGOCounterPromoter(
245       DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
246       Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
247       : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
248         LI(LI), BFI(BFI) {
249 
250     // Skip collection of ExitBlocks and InsertPts for loops that will not be
251     // able to have counters promoted.
252     SmallVector<BasicBlock *, 8> LoopExitBlocks;
253     SmallPtrSet<BasicBlock *, 8> BlockSet;
254 
255     L.getExitBlocks(LoopExitBlocks);
256     if (!isPromotionPossible(&L, LoopExitBlocks))
257       return;
258 
259     for (BasicBlock *ExitBlock : LoopExitBlocks) {
260       if (BlockSet.insert(ExitBlock).second) {
261         ExitBlocks.push_back(ExitBlock);
262         InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
263       }
264     }
265   }
266 
267   bool run(int64_t *NumPromoted) {
268     // Skip 'infinite' loops:
269     if (ExitBlocks.size() == 0)
270       return false;
271     unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
272     if (MaxProm == 0)
273       return false;
274 
275     unsigned Promoted = 0;
276     for (auto &Cand : LoopToCandidates[&L]) {
277 
278       SmallVector<PHINode *, 4> NewPHIs;
279       SSAUpdater SSA(&NewPHIs);
280       Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);
281 
282       // If BFI is set, we will use it to guide the promotions.
283       if (BFI) {
284         auto *BB = Cand.first->getParent();
285         auto InstrCount = BFI->getBlockProfileCount(BB);
286         if (!InstrCount)
287           continue;
288         auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion: PreheaderCount * 3 >= InstrCount * 2 is equivalent to
        // InstrCount / PreheaderCount <= 1.5.
291         if (PreheaderCount &&
292             (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
293           continue;
294       }
295 
296       PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
297                                         L.getLoopPreheader(), ExitBlocks,
298                                         InsertPts, LoopToCandidates, LI);
299       Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
300       Promoted++;
301       if (Promoted >= MaxProm)
302         break;
303 
304       (*NumPromoted)++;
305       if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
306         break;
307     }
308 
309     LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
310                       << L.getLoopDepth() << ")\n");
311     return Promoted != 0;
312   }
313 
314 private:
315   bool allowSpeculativeCounterPromotion(Loop *LP) {
316     SmallVector<BasicBlock *, 8> ExitingBlocks;
317     L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
319     if (ExitingBlocks.size() == 1)
320       return true;
321     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
322       return false;
323     return true;
324   }
325 
326   // Check whether the loop satisfies the basic conditions needed to perform
327   // Counter Promotions.
328   bool isPromotionPossible(Loop *LP,
329                            const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
330     // We can't insert into a catchswitch.
331     if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
332           return isa<CatchSwitchInst>(Exit->getTerminator());
333         }))
334       return false;
335 
336     if (!LP->hasDedicatedExits())
337       return false;
338 
339     BasicBlock *PH = LP->getLoopPreheader();
340     if (!PH)
341       return false;
342 
343     return true;
344   }
345 
346   // Returns the max number of Counter Promotions for LP.
347   unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
348     SmallVector<BasicBlock *, 8> LoopExitBlocks;
349     LP->getExitBlocks(LoopExitBlocks);
350     if (!isPromotionPossible(LP, LoopExitBlocks))
351       return 0;
352 
353     SmallVector<BasicBlock *, 8> ExitingBlocks;
354     LP->getExitingBlocks(ExitingBlocks);
355 
356     // If BFI is set, we do more aggressive promotions based on BFI.
357     if (BFI)
358       return (unsigned)-1;
359 
    // Not considered speculative.
361     if (ExitingBlocks.size() == 1)
362       return MaxNumOfPromotionsPerLoop;
363 
364     if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
365       return 0;
366 
367     // Whether the target block is in a loop does not matter:
368     if (SpeculativeCounterPromotionToLoop)
369       return MaxNumOfPromotionsPerLoop;
370 
371     // Now check the target block:
372     unsigned MaxProm = MaxNumOfPromotionsPerLoop;
373     for (auto *TargetBlock : LoopExitBlocks) {
374       auto *TargetLoop = LI.getLoopFor(TargetBlock);
375       if (!TargetLoop)
376         continue;
377       unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
378       unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
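      // The target loop can absorb at most MaxPromForTarget promotions and
      // already has PendingCandsInTarget pending candidates, so clamp this
      // loop's budget by the room left in the target (never below zero).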
379       MaxProm =
380           std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
381                                 PendingCandsInTarget);
382     }
383     return MaxProm;
384   }
385 
386   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
387   SmallVector<BasicBlock *, 8> ExitBlocks;
388   SmallVector<Instruction *, 8> InsertPts;
389   Loop &L;
390   LoopInfo &LI;
391   BlockFrequencyInfo *BFI;
392 };
393 
394 } // end anonymous namespace
395 
396 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
397   FunctionAnalysisManager &FAM =
398       AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
399   auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
400     return FAM.getResult<TargetLibraryAnalysis>(F);
401   };
402   if (!run(M, GetTLI))
403     return PreservedAnalyses::all();
404 
405   return PreservedAnalyses::none();
406 }
407 
408 char InstrProfilingLegacyPass::ID = 0;
409 INITIALIZE_PASS_BEGIN(
410     InstrProfilingLegacyPass, "instrprof",
411     "Frontend instrumentation-based coverage lowering.", false, false)
412 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
413 INITIALIZE_PASS_END(
414     InstrProfilingLegacyPass, "instrprof",
415     "Frontend instrumentation-based coverage lowering.", false, false)
416 
417 ModulePass *
418 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
419                                      bool IsCS) {
420   return new InstrProfilingLegacyPass(Options, IsCS);
421 }
422 
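// Return Instr as an increment intrinsic (either instrprof_increment or
// instrprof_increment_step), or nullptr if it is neither.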
423 static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
424   InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
425   if (Inc)
426     return Inc;
427   return dyn_cast<InstrProfIncrementInst>(Instr);
428 }
429 
430 bool InstrProfiling::lowerIntrinsics(Function *F) {
431   bool MadeChange = false;
432   PromotionCandidates.clear();
433   for (BasicBlock &BB : *F) {
434     for (auto I = BB.begin(), E = BB.end(); I != E;) {
435       auto Instr = I++;
436       InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr);
437       if (Inc) {
438         lowerIncrement(Inc);
439         MadeChange = true;
440       } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(Instr)) {
441         lowerValueProfileInst(Ind);
442         MadeChange = true;
443       }
444     }
445   }
446 
447   if (!MadeChange)
448     return false;
449 
450   promoteCounterLoadStores(F);
451   return true;
452 }
453 
454 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
455   if (RuntimeCounterRelocation.getNumOccurrences() > 0)
456     return RuntimeCounterRelocation;
457 
458   return TT.isOSFuchsia();
459 }
460 
461 bool InstrProfiling::isCounterPromotionEnabled() const {
462   if (DoCounterPromotion.getNumOccurrences() > 0)
463     return DoCounterPromotion;
464 
465   return Options.DoCounterPromotion;
466 }
467 
468 void InstrProfiling::promoteCounterLoadStores(Function *F) {
469   if (!isCounterPromotionEnabled())
470     return;
471 
472   DominatorTree DT(*F);
473   LoopInfo LI(DT);
474   DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;
475 
476   std::unique_ptr<BlockFrequencyInfo> BFI;
477   if (Options.UseBFIInPromotion) {
478     std::unique_ptr<BranchProbabilityInfo> BPI;
479     BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
480     BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
481   }
482 
483   for (const auto &LoadStore : PromotionCandidates) {
484     auto *CounterLoad = LoadStore.first;
485     auto *CounterStore = LoadStore.second;
486     BasicBlock *BB = CounterLoad->getParent();
487     Loop *ParentLoop = LI.getLoopFor(BB);
488     if (!ParentLoop)
489       continue;
490     LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
491   }
492 
493   SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();
494 
495   // Do a post-order traversal of the loops so that counter updates can be
496   // iteratively hoisted outside the loop nest.
497   for (auto *Loop : llvm::reverse(Loops)) {
498     PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
499     Promoter.run(&TotalCountersPromoted);
500   }
501 }
502 
503 /// Check if the module contains uses of any profiling intrinsics.
504 static bool containsProfilingIntrinsics(Module &M) {
505   if (auto *F = M.getFunction(
506           Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
507     if (!F->use_empty())
508       return true;
509   if (auto *F = M.getFunction(
510           Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
511     if (!F->use_empty())
512       return true;
513   if (auto *F = M.getFunction(
514           Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
515     if (!F->use_empty())
516       return true;
517   return false;
518 }
519 
520 bool InstrProfiling::run(
521     Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
522   this->M = &M;
523   this->GetTLI = std::move(GetTLI);
524   NamesVar = nullptr;
525   NamesSize = 0;
526   ProfileDataMap.clear();
527   UsedVars.clear();
528   getMemOPSizeRangeFromOption(MemOPSizeRange, MemOPSizeRangeStart,
529                               MemOPSizeRangeLast);
530   TT = Triple(M.getTargetTriple());
531 
532   // Emit the runtime hook even if no counters are present.
533   bool MadeChange = emitRuntimeHook();
534 
535   // Improve compile time by avoiding linear scans when there is no work.
536   GlobalVariable *CoverageNamesVar =
537       M.getNamedGlobal(getCoverageUnusedNamesVarName());
538   if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
539     return MadeChange;
540 
  // The number of value sites inside an instrumented function is not known
  // up front. Count the instrumented value sites here so the count can be
  // recorded as a field in the per-function profile data variable.
544   for (Function &F : M) {
545     InstrProfIncrementInst *FirstProfIncInst = nullptr;
546     for (BasicBlock &BB : F)
547       for (auto I = BB.begin(), E = BB.end(); I != E; I++)
548         if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
549           computeNumValueSiteCounts(Ind);
550         else if (FirstProfIncInst == nullptr)
551           FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);
552 
    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
555     if (FirstProfIncInst != nullptr)
556       static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
557   }
558 
559   for (Function &F : M)
560     MadeChange |= lowerIntrinsics(&F);
561 
562   if (CoverageNamesVar) {
563     lowerCoverageData(CoverageNamesVar);
564     MadeChange = true;
565   }
566 
567   if (!MadeChange)
568     return false;
569 
570   emitVNodes();
571   emitNameData();
572   emitRegistration();
573   emitUses();
574   emitInitialization();
575   return true;
576 }
577 
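// Declare (or find) the value-profiling callback in the profile runtime that
// lowered instrprof_value_profile intrinsics will call. The range variant is
// used for memop size profiling and takes the extra size-range arguments; the
// parameter lists come directly from InstrProfData.inc.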
578 static FunctionCallee
579 getOrInsertValueProfilingCall(Module &M, const TargetLibraryInfo &TLI,
580                               bool IsRange = false) {
581   LLVMContext &Ctx = M.getContext();
582   auto *ReturnTy = Type::getVoidTy(M.getContext());
583 
584   AttributeList AL;
585   if (auto AK = TLI.getExtAttrForI32Param(false))
586     AL = AL.addParamAttribute(M.getContext(), 2, AK);
587 
588   if (!IsRange) {
589     Type *ParamTypes[] = {
590 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
591 #include "llvm/ProfileData/InstrProfData.inc"
592     };
593     auto *ValueProfilingCallTy =
594         FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
595     return M.getOrInsertFunction(getInstrProfValueProfFuncName(),
596                                  ValueProfilingCallTy, AL);
597   } else {
598     Type *RangeParamTypes[] = {
599 #define VALUE_RANGE_PROF 1
600 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
601 #include "llvm/ProfileData/InstrProfData.inc"
602 #undef VALUE_RANGE_PROF
603     };
604     auto *ValueRangeProfilingCallTy =
605         FunctionType::get(ReturnTy, makeArrayRef(RangeParamTypes), false);
606     return M.getOrInsertFunction(getInstrProfValueRangeProfFuncName(),
607                                  ValueRangeProfilingCallTy, AL);
608   }
609 }
610 
611 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
612   GlobalVariable *Name = Ind->getName();
613   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
614   uint64_t Index = Ind->getIndex()->getZExtValue();
615   auto It = ProfileDataMap.find(Name);
616   if (It == ProfileDataMap.end()) {
617     PerFunctionProfileData PD;
618     PD.NumValueSites[ValueKind] = Index + 1;
619     ProfileDataMap[Name] = PD;
620   } else if (It->second.NumValueSites[ValueKind] <= Index)
621     It->second.NumValueSites[ValueKind] = Index + 1;
622 }
623 
624 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
625   GlobalVariable *Name = Ind->getName();
626   auto It = ProfileDataMap.find(Name);
627   assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");
629 
630   GlobalVariable *DataVar = It->second.DataVar;
631   uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
632   uint64_t Index = Ind->getIndex()->getZExtValue();
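  // Flatten the (ValueKind, Index) pair into a single index into the
  // per-function value-site array by skipping the sites of all preceding
  // value kinds.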
633   for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
634     Index += It->second.NumValueSites[Kind];
635 
636   IRBuilder<> Builder(Ind);
637   bool IsRange = (Ind->getValueKind()->getZExtValue() ==
638                   llvm::InstrProfValueKind::IPVK_MemOPSize);
639   CallInst *Call = nullptr;
640   auto *TLI = &GetTLI(*Ind->getFunction());
641 
642   // To support value profiling calls within Windows exception handlers, funclet
643   // information contained within operand bundles needs to be copied over to
644   // the library call. This is required for the IR to be processed by the
645   // WinEHPrepare pass.
646   SmallVector<OperandBundleDef, 1> OpBundles;
647   Ind->getOperandBundlesAsDefs(OpBundles);
648   if (!IsRange) {
649     Value *Args[3] = {Ind->getTargetValue(),
650                       Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
651                       Builder.getInt32(Index)};
652     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
653                               OpBundles);
654   } else {
655     Value *Args[6] = {
656         Ind->getTargetValue(),
657         Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
658         Builder.getInt32(Index),
659         Builder.getInt64(MemOPSizeRangeStart),
660         Builder.getInt64(MemOPSizeRangeLast),
661         Builder.getInt64(MemOPSizeLarge == 0 ? INT64_MIN : MemOPSizeLarge)};
662     Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI, true),
663                               Args, OpBundles);
664   }
665   if (auto AK = TLI->getExtAttrForI32Param(false))
666     Call->addParamAttr(2, AK);
667   Ind->replaceAllUsesWith(Call);
668   Ind->eraseFromParent();
669 }
670 
671 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
672   GlobalVariable *Counters = getOrCreateRegionCounters(Inc);
673 
674   IRBuilder<> Builder(Inc);
675   uint64_t Index = Inc->getIndex()->getZExtValue();
676   Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
677                                                    Counters, 0, Index);
678 
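  // With runtime counter relocation, the counter address is not used directly;
  // a per-module bias, loaded once in the function entry block, is added to it
  // first. The lowered sequence looks roughly like this (illustrative):
  //   %bias = load i64, i64* @<counter_bias_var>
  //   %addr = add i64 ptrtoint(i64* %counter to i64), %bias
  //   ...update the counter through inttoptr(%addr)...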
679   if (isRuntimeCounterRelocationEnabled()) {
680     Type *Int64Ty = Type::getInt64Ty(M->getContext());
681     Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
682     Function *Fn = Inc->getParent()->getParent();
683     Instruction &I = Fn->getEntryBlock().front();
684     LoadInst *LI = dyn_cast<LoadInst>(&I);
685     if (!LI) {
686       IRBuilder<> Builder(&I);
688       GlobalVariable *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
689       if (!Bias) {
690         Bias = new GlobalVariable(*M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
691                                   Constant::getNullValue(Int64Ty),
692                                   getInstrProfCounterBiasVarName());
693         Bias->setVisibility(GlobalVariable::HiddenVisibility);
694       }
695       LI = Builder.CreateLoad(Int64Ty, Bias);
696     }
697     auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
698     Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
699   }
700 
701   if (Options.Atomic || AtomicCounterUpdateAll) {
702     Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
703                             AtomicOrdering::Monotonic);
704   } else {
705     Value *IncStep = Inc->getStep();
706     Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
707     auto *Count = Builder.CreateAdd(Load, Inc->getStep());
708     auto *Store = Builder.CreateStore(Count, Addr);
709     if (isCounterPromotionEnabled())
710       PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
711   }
712   Inc->eraseFromParent();
713 }
714 
715 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
716   ConstantArray *Names =
717       cast<ConstantArray>(CoverageNamesVar->getInitializer());
718   for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
719     Constant *NC = Names->getOperand(I);
720     Value *V = NC->stripPointerCasts();
721     assert(isa<GlobalVariable>(V) && "Missing reference to function name");
722     GlobalVariable *Name = cast<GlobalVariable>(V);
723 
724     Name->setLinkage(GlobalValue::PrivateLinkage);
725     ReferencedNames.push_back(Name);
726     NC->dropAllReferences();
727   }
728   CoverageNamesVar->eraseFromParent();
729 }
730 
731 /// Get the name of a profiling variable for a particular function.
732 static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix) {
733   StringRef NamePrefix = getInstrProfNameVarPrefix();
734   StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
735   Function *F = Inc->getParent()->getParent();
736   Module *M = F->getParent();
737   if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
738       !canRenameComdatFunc(*F))
739     return (Prefix + Name).str();
740   uint64_t FuncHash = Inc->getHash()->getZExtValue();
741   SmallVector<char, 24> HashPostfix;
742   if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
743     return (Prefix + Name).str();
744   return (Prefix + Name + "." + Twine(FuncHash)).str();
745 }
746 
747 static inline bool shouldRecordFunctionAddr(Function *F) {
748   // Check the linkage
749   bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
750   if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
751       !HasAvailableExternallyLinkage)
752     return true;
753 
754   // A function marked 'alwaysinline' with available_externally linkage can't
755   // have its address taken. Doing so would create an undefined external ref to
756   // the function, which would fail to link.
757   if (HasAvailableExternallyLinkage &&
758       F->hasFnAttribute(Attribute::AlwaysInline))
759     return false;
760 
761   // Prohibit function address recording if the function is both internal and
762   // COMDAT. This avoids the profile data variable referencing internal symbols
763   // in COMDAT.
764   if (F->hasLocalLinkage() && F->hasComdat())
765     return false;
766 
767   // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkonce_odr linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with the missing address may be picked by the linker, leading to missing
  // indirect call target info.
774   return F->hasAddressTaken() || F->hasLinkOnceLinkage();
775 }
776 
777 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
778   // Don't do this for Darwin.  compiler-rt uses linker magic.
779   if (TT.isOSDarwin())
780     return false;
781   // Use linker script magic to get data/cnts/name start/end.
782   if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
783       TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
784       TT.isOSWindows())
785     return false;
786 
787   return true;
788 }
789 
790 GlobalVariable *
791 InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
792   GlobalVariable *NamePtr = Inc->getName();
793   auto It = ProfileDataMap.find(NamePtr);
794   PerFunctionProfileData PD;
795   if (It != ProfileDataMap.end()) {
796     if (It->second.RegionCounters)
797       return It->second.RegionCounters;
798     PD = It->second;
799   }
800 
801   // Match the linkage and visibility of the name global. COFF supports using
802   // comdats with internal symbols, so do that if we can.
803   Function *Fn = Inc->getParent()->getParent();
804   GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
805   GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
806   if (TT.isOSBinFormatCOFF()) {
807     Linkage = GlobalValue::InternalLinkage;
808     Visibility = GlobalValue::DefaultVisibility;
809   }
810 
811   // Move the name variable to the right section. Place them in a COMDAT group
812   // if the associated function is a COMDAT. This will make sure that only one
813   // copy of counters of the COMDAT function will be emitted after linking. Keep
814   // in mind that this pass may run before the inliner, so we need to create a
815   // new comdat group for the counters and profiling data. If we use the comdat
816   // of the parent function, that will result in relocations against discarded
817   // sections.
818   bool NeedComdat = needsComdatForCounter(*Fn, *M);
819   if (NeedComdat) {
820     if (TT.isOSBinFormatCOFF()) {
821       // For COFF, put the counters, data, and values each into their own
822       // comdats. We can't use a group because the Visual C++ linker will
823       // report duplicate symbol errors if there are multiple external symbols
824       // with the same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
825       Linkage = GlobalValue::LinkOnceODRLinkage;
826       Visibility = GlobalValue::HiddenVisibility;
827     }
828   }
829   auto MaybeSetComdat = [=](GlobalVariable *GV) {
830     if (NeedComdat)
831       GV->setComdat(M->getOrInsertComdat(GV->getName()));
832   };
833 
834   uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
835   LLVMContext &Ctx = M->getContext();
836   ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
837 
838   // Create the counters variable.
839   auto *CounterPtr =
840       new GlobalVariable(*M, CounterTy, false, Linkage,
841                          Constant::getNullValue(CounterTy),
842                          getVarName(Inc, getInstrProfCountersVarPrefix()));
843   CounterPtr->setVisibility(Visibility);
844   CounterPtr->setSection(
845       getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
846   CounterPtr->setAlignment(Align(8));
847   MaybeSetComdat(CounterPtr);
848   CounterPtr->setLinkage(Linkage);
849 
850   auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
851   // Allocate statically the array of pointers to value profile nodes for
852   // the current function.
853   Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
854   if (ValueProfileStaticAlloc && !needsRuntimeRegistrationOfSectionRange(TT)) {
855     uint64_t NS = 0;
856     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
857       NS += PD.NumValueSites[Kind];
858     if (NS) {
859       ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
860 
861       auto *ValuesVar =
862           new GlobalVariable(*M, ValuesTy, false, Linkage,
863                              Constant::getNullValue(ValuesTy),
864                              getVarName(Inc, getInstrProfValuesVarPrefix()));
865       ValuesVar->setVisibility(Visibility);
866       ValuesVar->setSection(
867           getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
868       ValuesVar->setAlignment(Align(8));
869       MaybeSetComdat(ValuesVar);
870       ValuesPtrExpr =
871           ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
872     }
873   }
874 
875   // Create data variable.
876   auto *Int16Ty = Type::getInt16Ty(Ctx);
877   auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
878   Type *DataTypes[] = {
879 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
880 #include "llvm/ProfileData/InstrProfData.inc"
881   };
882   auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));
883 
884   Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
885                                ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
886                                : ConstantPointerNull::get(Int8PtrTy);
887 
888   Constant *Int16ArrayVals[IPVK_Last + 1];
889   for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
890     Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);
891 
892   Constant *DataVals[] = {
893 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
894 #include "llvm/ProfileData/InstrProfData.inc"
895   };
896   auto *Data = new GlobalVariable(*M, DataTy, false, Linkage,
897                                   ConstantStruct::get(DataTy, DataVals),
898                                   getVarName(Inc, getInstrProfDataVarPrefix()));
899   Data->setVisibility(Visibility);
900   Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
901   Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
902   MaybeSetComdat(Data);
903   Data->setLinkage(Linkage);
904 
905   PD.RegionCounters = CounterPtr;
906   PD.DataVar = Data;
907   ProfileDataMap[NamePtr] = PD;
908 
909   // Mark the data variable as used so that it isn't stripped out.
910   UsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset the Name variable's linkage and visibility to private so
  // that it can be removed later by the compiler.
914   NamePtr->setLinkage(GlobalValue::PrivateLinkage);
915   // Collect the referenced names to be used by emitNameData.
916   ReferencedNames.push_back(NamePtr);
917 
918   return CounterPtr;
919 }
920 
921 void InstrProfiling::emitVNodes() {
922   if (!ValueProfileStaticAlloc)
923     return;
924 
  // For now only support this on platforms that do not require runtime
  // registration to discover named section start/end.
928   if (needsRuntimeRegistrationOfSectionRange(TT))
929     return;
930 
931   size_t TotalNS = 0;
932   for (auto &PD : ProfileDataMap) {
933     for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
934       TotalNS += PD.second.NumValueSites[Kind];
935   }
936 
937   if (!TotalNS)
938     return;
939 
940   uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
941 // Heuristic for small programs with very few total value sites.
942 // The default value of vp-counters-per-site is chosen based on
943 // the observation that large apps usually have a low percentage
944 // of value sites that actually have any profile data, and thus
945 // the average number of counters per site is low. For small
946 // apps with very few sites, this may not be true. Bump up the
947 // number of counters in this case.
948 #define INSTR_PROF_MIN_VAL_COUNTS 10
949   if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
950     NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);
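  // For example, with 4 value sites and the default 1.0 counters per site,
  // NumCounters starts at 4 and is bumped up to max(10, 4 * 2) = 10.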
951 
952   auto &Ctx = M->getContext();
953   Type *VNodeTypes[] = {
954 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
955 #include "llvm/ProfileData/InstrProfData.inc"
956   };
957   auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));
958 
959   ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
960   auto *VNodesVar = new GlobalVariable(
961       *M, VNodesTy, false, GlobalValue::PrivateLinkage,
962       Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
963   VNodesVar->setSection(
964       getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
965   UsedVars.push_back(VNodesVar);
966 }
967 
968 void InstrProfiling::emitNameData() {
969   std::string UncompressedData;
970 
971   if (ReferencedNames.empty())
972     return;
973 
974   std::string CompressedNameStr;
975   if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
976                                           DoNameCompression)) {
977     report_fatal_error(toString(std::move(E)), false);
978   }
979 
980   auto &Ctx = M->getContext();
981   auto *NamesVal = ConstantDataArray::getString(
982       Ctx, StringRef(CompressedNameStr), false);
983   NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
984                                 GlobalValue::PrivateLinkage, NamesVal,
985                                 getInstrProfNamesVarName());
986   NamesSize = CompressedNameStr.size();
987   NamesVar->setSection(
988       getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
989   // On COFF, it's important to reduce the alignment down to 1 to prevent the
990   // linker from inserting padding before the start of the names section or
991   // between names entries.
992   NamesVar->setAlignment(Align(1));
993   UsedVars.push_back(NamesVar);
994 
995   for (auto *NamePtr : ReferencedNames)
996     NamePtr->eraseFromParent();
997 }
998 
999 void InstrProfiling::emitRegistration() {
1000   if (!needsRuntimeRegistrationOfSectionRange(TT))
1001     return;
1002 
1003   // Construct the function.
1004   auto *VoidTy = Type::getVoidTy(M->getContext());
1005   auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
1006   auto *Int64Ty = Type::getInt64Ty(M->getContext());
1007   auto *RegisterFTy = FunctionType::get(VoidTy, false);
1008   auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
1009                                      getInstrProfRegFuncsName(), M);
1010   RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1011   if (Options.NoRedZone)
1012     RegisterF->addFnAttr(Attribute::NoRedZone);
1013 
1014   auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
1015   auto *RuntimeRegisterF =
1016       Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
1017                        getInstrProfRegFuncName(), M);
1018 
1019   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
1020   for (Value *Data : UsedVars)
1021     if (Data != NamesVar && !isa<Function>(Data))
1022       IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
1023 
1024   if (NamesVar) {
1025     Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
1026     auto *NamesRegisterTy =
1027         FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
1028     auto *NamesRegisterF =
1029         Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
1030                          getInstrProfNamesRegFuncName(), M);
1031     IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
1032                                     IRB.getInt64(NamesSize)});
1033   }
1034 
1035   IRB.CreateRetVoid();
1036 }
1037 
1038 bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
  // in which case there is no need to emit the user function.
1041   if (TT.isOSLinux())
1042     return false;
1043 
1044   // If the module's provided its own runtime, we don't need to do anything.
1045   if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
1046     return false;
1047 
1048   // Declare an external variable that will pull in the runtime initialization.
1049   auto *Int32Ty = Type::getInt32Ty(M->getContext());
1050   auto *Var =
1051       new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
1052                          nullptr, getInstrProfRuntimeHookVarName());
1053 
1054   // Make a function that uses it.
1055   auto *User = Function::Create(FunctionType::get(Int32Ty, false),
1056                                 GlobalValue::LinkOnceODRLinkage,
1057                                 getInstrProfRuntimeHookVarUseFuncName(), M);
1058   User->addFnAttr(Attribute::NoInline);
1059   if (Options.NoRedZone)
1060     User->addFnAttr(Attribute::NoRedZone);
1061   User->setVisibility(GlobalValue::HiddenVisibility);
1062   if (TT.supportsCOMDAT())
1063     User->setComdat(M->getOrInsertComdat(User->getName()));
1064 
1065   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
1066   auto *Load = IRB.CreateLoad(Int32Ty, Var);
1067   IRB.CreateRet(Load);
1068 
1069   // Mark the user variable as used so that it isn't stripped out.
1070   UsedVars.push_back(User);
1071   return true;
1072 }
1073 
1074 void InstrProfiling::emitUses() {
1075   if (!UsedVars.empty())
1076     appendToUsed(*M, UsedVars);
1077 }
1078 
1079 void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: that lowering happens after
  // LTO/ThinLTO linking, and the PGOInstrumentationGenCreateVar pass should
  // have already created the variable before LTO/ThinLTO linking.
1084   if (!IsCS)
1085     createProfileFileNameVar(*M, Options.InstrProfileOutput);
1086   Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
1087   if (!RegisterF)
1088     return;
1089 
1090   // Create the initialization function.
1091   auto *VoidTy = Type::getVoidTy(M->getContext());
1092   auto *F = Function::Create(FunctionType::get(VoidTy, false),
1093                              GlobalValue::InternalLinkage,
1094                              getInstrProfInitFuncName(), M);
1095   F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
1096   F->addFnAttr(Attribute::NoInline);
1097   if (Options.NoRedZone)
1098     F->addFnAttr(Attribute::NoRedZone);
1099 
1100   // Add the basic block and the necessary calls.
1101   IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
1102   IRB.CreateCall(RegisterF, {});
1103   IRB.CreateRetVoid();
1104 
1105   appendToGlobalCtors(*M, F, 0);
1106 }
1107