//===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
// It also builds the data structures and initialization code needed for
// updating execution counts and emitting the profile at runtime.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation/InstrProfiling.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <string>

using namespace llvm;

#define DEBUG_TYPE "instrprof"

// The start and end values of the precise value profile range for memory
// intrinsic sizes.
cl::opt<std::string> MemOPSizeRange(
    "memop-size-range",
    cl::desc("Set the range of size in memory intrinsic calls to be profiled "
             "precisely, in a format of <start_val>:<end_val>"),
    cl::init(""));

// The value that is considered to be a large value in memory intrinsic size
// profiling.
cl::opt<unsigned> MemOPSizeLarge(
    "memop-size-large",
    cl::desc("Set large value threshold in memory intrinsic size profiling. "
" 72 "Value of 0 disables the large value profiling."), 73 cl::init(8192)); 74 75 namespace { 76 77 cl::opt<bool> DoHashBasedCounterSplit( 78 "hash-based-counter-split", 79 cl::desc("Rename counter variable of a comdat function based on cfg hash"), 80 cl::init(true)); 81 82 cl::opt<bool> RuntimeCounterRelocation( 83 "runtime-counter-relocation", 84 cl::desc("Enable relocating counters at runtime."), 85 cl::init(false)); 86 87 cl::opt<bool> ValueProfileStaticAlloc( 88 "vp-static-alloc", 89 cl::desc("Do static counter allocation for value profiler"), 90 cl::init(true)); 91 92 cl::opt<double> NumCountersPerValueSite( 93 "vp-counters-per-site", 94 cl::desc("The average number of profile counters allocated " 95 "per value profiling site."), 96 // This is set to a very small value because in real programs, only 97 // a very small percentage of value sites have non-zero targets, e.g, 1/30. 98 // For those sites with non-zero profile, the average number of targets 99 // is usually smaller than 2. 100 cl::init(1.0)); 101 102 cl::opt<bool> AtomicCounterUpdateAll( 103 "instrprof-atomic-counter-update-all", cl::ZeroOrMore, 104 cl::desc("Make all profile counter updates atomic (for testing only)"), 105 cl::init(false)); 106 107 cl::opt<bool> AtomicCounterUpdatePromoted( 108 "atomic-counter-update-promoted", cl::ZeroOrMore, 109 cl::desc("Do counter update using atomic fetch add " 110 " for promoted counters only"), 111 cl::init(false)); 112 113 // If the option is not specified, the default behavior about whether 114 // counter promotion is done depends on how instrumentaiton lowering 115 // pipeline is setup, i.e., the default value of true of this option 116 // does not mean the promotion will be done by default. Explicitly 117 // setting this option can override the default behavior. 
cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
                                 cl::desc("Do counter register promotion"),
                                 cl::init(false));
cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
    cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number of counter promotions per loop to avoid"
             " increasing register pressure too much"));

// A debug option
cl::opt<int>
    MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
                       cl::desc("Max number of allowed counter promotions"));

cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
    cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             " speculative counter promotion"));

cl::opt<bool> SpeculativeCounterPromotionToLoop(
    cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             " update can be further/iteratively promoted into an acyclic "
             " region."));

cl::opt<bool> IterativeCounterPromotion(
    cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
    cl::desc("Allow counter promotion across the whole loop nest."));

class InstrProfilingLegacyPass : public ModulePass {
  InstrProfiling InstrProf;

public:
  static char ID;

  InstrProfilingLegacyPass() : ModulePass(ID) {}
  InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
      : ModulePass(ID), InstrProf(Options, IsCS) {}

  StringRef getPassName() const override {
    return "Frontend instrumentation-based coverage lowering";
  }

  bool runOnModule(Module &M) override {
    auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
      return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
    };
    return InstrProf.run(M, GetTLI);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
  }
};

///
/// A helper class to promote one counter RMW operation in the loop
/// into a register update.
///
/// The RMW update for the counter will be sunk out of the loop after
/// the transformation.
///
class PGOCounterPromoterHelper : public LoadAndStorePromoter {
public:
  PGOCounterPromoterHelper(
      Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
      BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<Instruction *> InsertPts,
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      LoopInfo &LI)
      : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
        InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
    assert(isa<LoadInst>(L));
    assert(isa<StoreInst>(S));
    SSA.AddAvailableValue(PH, Init);
  }

  void doExtraRewritesBeforeFinalDeletion() override {
    for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
      BasicBlock *ExitBlock = ExitBlocks[i];
      Instruction *InsertPos = InsertPts[i];
      // Get the LiveIn value into the ExitBlock. If there are multiple
      // predecessors, the value is defined by a PHI node in this
      // block.
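      // The update sunk to each exit is effectively *Addr += LiveInValue,
      // emitted either as an atomic RMW or as a load/add/store sequence that
      // can itself become a promotion candidate for the enclosing loop.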
      Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
      Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
      Type *Ty = LiveInValue->getType();
      IRBuilder<> Builder(InsertPos);
      if (AtomicCounterUpdatePromoted)
        // The atomic update currently can only be promoted across the current
        // loop, not the whole loop nest.
        Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
                                AtomicOrdering::SequentiallyConsistent);
      else {
        LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
        auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
        auto *NewStore = Builder.CreateStore(NewVal, Addr);

        // Now update the parent loop's candidate list:
        if (IterativeCounterPromotion) {
          auto *TargetLoop = LI.getLoopFor(ExitBlock);
          if (TargetLoop)
            LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
        }
      }
    }
  }

private:
  Instruction *Store;
  ArrayRef<BasicBlock *> ExitBlocks;
  ArrayRef<Instruction *> InsertPts;
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  LoopInfo &LI;
};

/// A helper class to do register promotion for all profile counter
/// updates in a loop.
///
class PGOCounterPromoter {
public:
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
        LI(LI), BFI(BFI) {

    // Skip collection of ExitBlocks and InsertPts for loops that will not be
    // able to have counters promoted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;

    L.getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(&L, LoopExitBlocks))
      return;

    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;
    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
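        // The check below is the integer form of
        // InstrCount / PreheaderCount <= 1.5, i.e.
        // 3 * PreheaderCount >= 2 * InstrCount.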
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }

private:
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Check whether the loop satisfies the basic conditions needed to perform
  // Counter Promotions.
  bool isPromotionPossible(Loop *LP,
                           const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
    // We can't insert into a catchswitch.
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return false;

    if (!LP->hasDedicatedExits())
      return false;

    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return false;

    return true;
  }

  // Returns the max number of Counter Promotions for LP.
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(LP, LoopExitBlocks))
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block:
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks;
  SmallVector<Instruction *, 8> InsertPts;
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};

} // end anonymous namespace

PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  if (!run(M, GetTLI))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}

char InstrProfilingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)

ModulePass *
llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
                                     bool IsCS) {
  return new InstrProfilingLegacyPass(Options, IsCS);
}

static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
  InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
  if (Inc)
    return Inc;
  return dyn_cast<InstrProfIncrementInst>(Instr);
}

bool InstrProfiling::lowerIntrinsics(Function *F) {
  bool MadeChange = false;
  PromotionCandidates.clear();
  for (BasicBlock &BB : *F) {
    for (auto I = BB.begin(), E = BB.end(); I != E;) {
      auto Instr = I++;
      InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr);
      if (Inc) {
        lowerIncrement(Inc);
        MadeChange = true;
      } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(Instr)) {
        lowerValueProfileInst(Ind);
        MadeChange = true;
      }
    }
  }

  if (!MadeChange)
    return false;

  promoteCounterLoadStores(F);
  return true;
}

bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  if (RuntimeCounterRelocation.getNumOccurrences() > 0)
    return RuntimeCounterRelocation;

  return TT.isOSFuchsia();
}

bool InstrProfiling::isCounterPromotionEnabled() const {
  if (DoCounterPromotion.getNumOccurrences() > 0)
    return DoCounterPromotion;

  return Options.DoCounterPromotion;
}

void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}

/// Check if the module contains uses of any profiling intrinsics.
static bool containsProfilingIntrinsics(Module &M) {
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
    if (!F->use_empty())
      return true;
  return false;
}

bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  UsedVars.clear();
  getMemOPSizeRangeFromOption(MemOPSizeRange, MemOPSizeRangeStart,
                              MemOPSizeRangeLast);
  TT = Triple(M.getTargetTriple());

  // Emit the runtime hook even if no counters are present.
  bool MadeChange = emitRuntimeHook();

  // Improve compile time by avoiding linear scans when there is no work.
  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
    return MadeChange;

  // We do not know in advance how many value sites there are inside the
  // instrumented function, so count the instrumented target value sites here
  // and record the number as a field in the profile data variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
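    // Note: getOrCreateRegionCounters also creates the per-function data
    // variable and records it in ProfileDataMap, which lowerValueProfileInst
    // relies on when lowering the value profiling intrinsics.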
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  emitVNodes();
  emitNameData();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}

static FunctionCallee
getOrInsertValueProfilingCall(Module &M, const TargetLibraryInfo &TLI,
                              bool IsRange = false) {
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  if (!IsRange) {
    Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
    };
    auto *ValueProfilingCallTy =
        FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
    return M.getOrInsertFunction(getInstrProfValueProfFuncName(),
                                 ValueProfilingCallTy, AL);
  } else {
    Type *RangeParamTypes[] = {
#define VALUE_RANGE_PROF 1
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
#undef VALUE_RANGE_PROF
    };
    auto *ValueRangeProfilingCallTy =
        FunctionType::get(ReturnTy, makeArrayRef(RangeParamTypes), false);
    return M.getOrInsertFunction(getInstrProfValueRangeProfFuncName(),
                                 ValueRangeProfilingCallTy, AL);
  }
}

void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  auto It = ProfileDataMap.find(Name);
  if (It == ProfileDataMap.end()) {
    PerFunctionProfileData PD;
    PD.NumValueSites[ValueKind] = Index + 1;
    ProfileDataMap[Name] = PD;
  } else if (It->second.NumValueSites[ValueKind] <= Index)
    It->second.NumValueSites[ValueKind] = Index + 1;
}

void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");

  GlobalVariable *DataVar = It->second.DataVar;
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
    Index += It->second.NumValueSites[Kind];

  IRBuilder<> Builder(Ind);
  bool IsRange = (Ind->getValueKind()->getZExtValue() ==
                  llvm::InstrProfValueKind::IPVK_MemOPSize);
  CallInst *Call = nullptr;
  auto *TLI = &GetTLI(*Ind->getFunction());

  // To support value profiling calls within Windows exception handlers,
  // funclet information contained within operand bundles needs to be copied
  // over to the library call. This is required for the IR to be processed by
  // the WinEHPrepare pass.
  SmallVector<OperandBundleDef, 1> OpBundles;
  Ind->getOperandBundlesAsDefs(OpBundles);
  if (!IsRange) {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
                              OpBundles);
  } else {
    Value *Args[6] = {
        Ind->getTargetValue(),
        Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
        Builder.getInt32(Index),
        Builder.getInt64(MemOPSizeRangeStart),
        Builder.getInt64(MemOPSizeRangeLast),
        Builder.getInt64(MemOPSizeLarge == 0 ? INT64_MIN : MemOPSizeLarge)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI, true),
                              Args, OpBundles);
  }
  if (auto AK = TLI->getExtAttrForI32Param(false))
    Call->addParamAttr(2, AK);
  Ind->replaceAllUsesWith(Call);
  Ind->eraseFromParent();
}

void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  GlobalVariable *Counters = getOrCreateRegionCounters(Inc);

  IRBuilder<> Builder(Inc);
  uint64_t Index = Inc->getIndex()->getZExtValue();
  Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
                                                   Counters, 0, Index);

  if (isRuntimeCounterRelocationEnabled()) {
    Type *Int64Ty = Type::getInt64Ty(M->getContext());
    Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
    Function *Fn = Inc->getParent()->getParent();
    Instruction &I = Fn->getEntryBlock().front();
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI) {
      IRBuilder<> Builder(&I);
      Type *Int64Ty = Type::getInt64Ty(M->getContext());
      GlobalVariable *Bias =
          M->getGlobalVariable(getInstrProfCounterBiasVarName());
      if (!Bias) {
        Bias = new GlobalVariable(*M, Int64Ty, false,
                                  GlobalValue::LinkOnceODRLinkage,
                                  Constant::getNullValue(Int64Ty),
                                  getInstrProfCounterBiasVarName());
        Bias->setVisibility(GlobalVariable::HiddenVisibility);
      }
      LI = Builder.CreateLoad(Int64Ty, Bias);
    }
    auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
    Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
  }

  if (Options.Atomic || AtomicCounterUpdateAll) {
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            AtomicOrdering::Monotonic);
  } else {
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}

void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
  ConstantArray *Names =
      cast<ConstantArray>(CoverageNamesVar->getInitializer());
  for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
    Constant *NC = Names->getOperand(I);
    Value *V = NC->stripPointerCasts();
    assert(isa<GlobalVariable>(V) && "Missing reference to function name");
    GlobalVariable *Name = cast<GlobalVariable>(V);

    Name->setLinkage(GlobalValue::PrivateLinkage);
    ReferencedNames.push_back(Name);
    NC->dropAllReferences();
  }
  CoverageNamesVar->eraseFromParent();
}

/// Get the name of a profiling variable for a particular function.
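/// For comdat functions under IR PGO, when hash-based counter splitting is
/// enabled, the CFG hash is appended to the name so that comdat copies with
/// different CFGs get distinct counter and data variables.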
static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix) {
  StringRef NamePrefix = getInstrProfNameVarPrefix();
  StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
  Function *F = Inc->getParent()->getParent();
  Module *M = F->getParent();
  if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
      !canRenameComdatFunc(*F))
    return (Prefix + Name).str();
  uint64_t FuncHash = Inc->getHash()->getZExtValue();
  SmallVector<char, 24> HashPostfix;
  if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
    return (Prefix + Name).str();
  return (Prefix + Name + "." + Twine(FuncHash)).str();
}

static inline bool shouldRecordFunctionAddr(Function *F) {
  // Check the linkage.
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // COMDAT. This avoids the profile data variable referencing internal symbols
  // in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkOnceODR linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'address taken'. If its address is not recorded here, the profile data
  // entry with the missing address may be picked by the linker, leading to
  // missing indirect-call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}

static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin. compiler-rt uses linker magic.
  if (TT.isOSDarwin())
    return false;
  // Use linker script magic to get data/cnts/name start/end.
  if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
      TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
      TT.isOSWindows())
    return false;

  return true;
}

GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  auto It = ProfileDataMap.find(NamePtr);
  PerFunctionProfileData PD;
  if (It != ProfileDataMap.end()) {
    if (It->second.RegionCounters)
      return It->second.RegionCounters;
    PD = It->second;
  }

  // Match the linkage and visibility of the name global. COFF supports using
  // comdats with internal symbols, so do that if we can.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
  if (TT.isOSBinFormatCOFF()) {
    Linkage = GlobalValue::InternalLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }

  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of the counters of the COMDAT function will be emitted after linking.
  // Keep in mind that this pass may run before the inliner, so we need to
  // create a new comdat group for the counters and profiling data. If we use
  // the comdat of the parent function, that will result in relocations against
  // discarded sections.
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  if (NeedComdat) {
    if (TT.isOSBinFormatCOFF()) {
      // For COFF, put the counters, data, and values each into their own
      // comdats. We can't use a group because the Visual C++ linker will
      // report duplicate symbol errors if there are multiple external symbols
      // with the same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
      Linkage = GlobalValue::LinkOnceODRLinkage;
      Visibility = GlobalValue::HiddenVisibility;
    }
  }
  auto MaybeSetComdat = [=](GlobalVariable *GV) {
    if (NeedComdat)
      GV->setComdat(M->getOrInsertComdat(GV->getName()));
  };

  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();
  ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);

  // Create the counters variable.
  auto *CounterPtr =
      new GlobalVariable(*M, CounterTy, false, Linkage,
                         Constant::getNullValue(CounterTy),
                         getVarName(Inc, getInstrProfCountersVarPrefix()));
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  CounterPtr->setAlignment(Align(8));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Statically allocate the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  if (ValueProfileStaticAlloc && !needsRuntimeRegistrationOfSectionRange(TT)) {
    uint64_t NS = 0;
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      NS += PD.NumValueSites[Kind];
    if (NS) {
      ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);

      auto *ValuesVar =
          new GlobalVariable(*M, ValuesTy, false, Linkage,
                             Constant::getNullValue(ValuesTy),
                             getVarName(Inc, getInstrProfValuesVarPrefix()));
      ValuesVar->setVisibility(Visibility);
      ValuesVar->setSection(
          getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
      ValuesVar->setAlignment(Align(8));
      MaybeSetComdat(ValuesVar);
      ValuesPtrExpr =
          ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
    }
  }

  // Create the data variable.
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *Data = new GlobalVariable(*M, DataTy, false, Linkage,
                                  ConstantStruct::get(DataTy, DataVals),
                                  getVarName(Inc, getInstrProfDataVarPrefix()));
  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  PD.RegionCounters = CounterPtr;
  PD.DataVar = Data;
  ProfileDataMap[NamePtr] = PD;

  // Mark the data variable as used so that it isn't stripped out.
  UsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset the name variable's linkage and visibility to private so
  // that it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return CounterPtr;
}

void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
  // Heuristic for small programs with very few total value sites.
  // The default value of vp-counters-per-site is chosen based on
  // the observation that large apps usually have a low percentage
  // of value sites that actually have any profile data, and thus
  // the average number of counters per site is low. For small
  // apps with very few sites, this may not be true. Bump up the
  // number of counters in this case.
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  UsedVars.push_back(VNodesVar);
}

void InstrProfiling::emitNameData() {
  std::string UncompressedData;

  if (ReferencedNames.empty())
    return;

  std::string CompressedNameStr;
  if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
                                          DoInstrProfNameCompression)) {
    report_fatal_error(toString(std::move(E)), false);
  }

  auto &Ctx = M->getContext();
  auto *NamesVal = ConstantDataArray::getString(
      Ctx, StringRef(CompressedNameStr), false);
  NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
                                GlobalValue::PrivateLinkage, NamesVal,
                                getInstrProfNamesVarName());
  NamesSize = CompressedNameStr.size();
  NamesVar->setSection(
      getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
  // On COFF, it's important to reduce the alignment down to 1 to prevent the
  // linker from inserting padding before the start of the names section or
  // between names entries.
  NamesVar->setAlignment(Align(1));
  UsedVars.push_back(NamesVar);

  for (auto *NamePtr : ReferencedNames)
    NamePtr->eraseFromParent();
}

void InstrProfiling::emitRegistration() {
  if (!needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Construct the function.
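  // The registration function passes every profile data variable (and the
  // names blob together with its size) to the runtime's registration entry
  // points, so the data can be located without linker-defined section bounds.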
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
  auto *Int64Ty = Type::getInt64Ty(M->getContext());
  auto *RegisterFTy = FunctionType::get(VoidTy, false);
  auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
                                     getInstrProfRegFuncsName(), M);
  RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  if (Options.NoRedZone)
    RegisterF->addFnAttr(Attribute::NoRedZone);

  auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
  auto *RuntimeRegisterF =
      Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
                       getInstrProfRegFuncName(), M);

  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
  for (Value *Data : UsedVars)
    if (Data != NamesVar && !isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));

  if (NamesVar) {
    Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
    auto *NamesRegisterTy =
        FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
    auto *NamesRegisterF =
        Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
                         getInstrProfNamesRegFuncName(), M);
    IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
                                    IRB.getInt64(NamesSize)});
  }

  IRB.CreateRetVoid();
}

bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag on Linux,
  // in which case there is no need to emit the user function.
  if (TT.isOSLinux())
    return false;

  // If the module has provided its own runtime, we don't need to do anything.
  if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
    return false;

  // Declare an external variable that will pull in the runtime initialization.
  auto *Int32Ty = Type::getInt32Ty(M->getContext());
  auto *Var =
      new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
                         nullptr, getInstrProfRuntimeHookVarName());

  // Make a function that uses it.
  auto *User = Function::Create(FunctionType::get(Int32Ty, false),
                                GlobalValue::LinkOnceODRLinkage,
                                getInstrProfRuntimeHookVarUseFuncName(), M);
  User->addFnAttr(Attribute::NoInline);
  if (Options.NoRedZone)
    User->addFnAttr(Attribute::NoRedZone);
  User->setVisibility(GlobalValue::HiddenVisibility);
  if (TT.supportsCOMDAT())
    User->setComdat(M->getOrInsertComdat(User->getName()));

  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
  auto *Load = IRB.CreateLoad(Int32Ty, Var);
  IRB.CreateRet(Load);

  // Mark the user function as used so that it isn't stripped out.
  UsedVars.push_back(User);
  return true;
}

void InstrProfiling::emitUses() {
  if (!UsedVars.empty())
    appendToUsed(*M, UsedVars);
}

void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: that lowering runs after
  // LTO/ThinLTO linking, and the PGOInstrumentationGenCreateVar pass should
  // have already created the variable before LTO/ThinLTO linking.
  if (!IsCS)
    createProfileFileNameVar(*M, Options.InstrProfileOutput);
  Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
  if (!RegisterF)
    return;

  // Create the initialization function.
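  // The initialization function simply calls the registration function and is
  // appended to the module's global constructors below, so registration runs
  // at program startup.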
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *F = Function::Create(FunctionType::get(VoidTy, false),
                             GlobalValue::InternalLinkage,
                             getInstrProfInitFuncName(), M);
  F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  F->addFnAttr(Attribute::NoInline);
  if (Options.NoRedZone)
    F->addFnAttr(Attribute::NoRedZone);

  // Add the basic block and the necessary calls.
  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
  IRB.CreateCall(RegisterF, {});
  IRB.CreateRetVoid();

  appendToGlobalCtors(*M, F, 0);
}