1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling. 10 // It also builds the data structures and initialization code needed for 11 // updating execution counts and emitting the profile at runtime. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h" 16 #include "llvm/ADT/ArrayRef.h" 17 #include "llvm/ADT/SmallVector.h" 18 #include "llvm/ADT/StringRef.h" 19 #include "llvm/ADT/Triple.h" 20 #include "llvm/ADT/Twine.h" 21 #include "llvm/Analysis/BlockFrequencyInfo.h" 22 #include "llvm/Analysis/BranchProbabilityInfo.h" 23 #include "llvm/Analysis/LoopInfo.h" 24 #include "llvm/Analysis/TargetLibraryInfo.h" 25 #include "llvm/IR/Attributes.h" 26 #include "llvm/IR/BasicBlock.h" 27 #include "llvm/IR/Constant.h" 28 #include "llvm/IR/Constants.h" 29 #include "llvm/IR/DIBuilder.h" 30 #include "llvm/IR/DerivedTypes.h" 31 #include "llvm/IR/DiagnosticInfo.h" 32 #include "llvm/IR/Dominators.h" 33 #include "llvm/IR/Function.h" 34 #include "llvm/IR/GlobalValue.h" 35 #include "llvm/IR/GlobalVariable.h" 36 #include "llvm/IR/IRBuilder.h" 37 #include "llvm/IR/Instruction.h" 38 #include "llvm/IR/Instructions.h" 39 #include "llvm/IR/IntrinsicInst.h" 40 #include "llvm/IR/Module.h" 41 #include "llvm/IR/Type.h" 42 #include "llvm/InitializePasses.h" 43 #include "llvm/Pass.h" 44 #include "llvm/ProfileData/InstrProf.h" 45 #include "llvm/ProfileData/InstrProfCorrelator.h" 46 #include "llvm/Support/Casting.h" 47 #include "llvm/Support/CommandLine.h" 48 #include "llvm/Support/Error.h" 49 
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <string>

using namespace llvm;

#define DEBUG_TYPE "instrprof"

// Placed in the llvm namespace (not the anonymous one below) so it is
// visible to other translation units.
namespace llvm {
cl::opt<bool>
    DebugInfoCorrelate("debug-info-correlate", cl::ZeroOrMore,
                       cl::desc("Use debug info to correlate profiles."),
                       cl::init(false));
} // namespace llvm

namespace {

// When true, a comdat function's counter variable gets the CFG hash appended
// to its name (see getVarName below) so differently-hashed copies don't merge.
cl::opt<bool> DoHashBasedCounterSplit(
    "hash-based-counter-split",
    cl::desc("Rename counter variable of a comdat function based on cfg hash"),
    cl::init(true));

cl::opt<bool>
    RuntimeCounterRelocation("runtime-counter-relocation",
                             cl::desc("Enable relocating counters at runtime."),
                             cl::init(false));

cl::opt<bool> ValueProfileStaticAlloc(
    "vp-static-alloc",
    cl::desc("Do static counter allocation for value profiler"),
    cl::init(true));

cl::opt<double> NumCountersPerValueSite(
    "vp-counters-per-site",
    cl::desc("The average number of profile counters allocated "
             "per value profiling site."),
    // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g, 1/30.
    // For those sites with non-zero profile, the average number of targets
    // is usually smaller than 2.
    cl::init(1.0));

cl::opt<bool> AtomicCounterUpdateAll(
    "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
    cl::desc("Make all profile counter updates atomic (for testing only)"),
    cl::init(false));

cl::opt<bool> AtomicCounterUpdatePromoted(
    "atomic-counter-update-promoted", cl::ZeroOrMore,
    cl::desc("Do counter update using atomic fetch add "
             " for promoted counters only"),
    cl::init(false));

cl::opt<bool> AtomicFirstCounter(
    "atomic-first-counter", cl::ZeroOrMore,
    cl::desc("Use atomic fetch add for first counter in a function (usually "
             "the entry counter)"),
    cl::init(false));

// If the option is not specified, whether counter promotion is done
// depends on how the instrumentation lowering pipeline is set up, i.e.,
// the static initializer of this option alone does not determine whether
// the promotion will be done by default. Explicitly setting this option
// can override that default behavior.
cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
                                 cl::desc("Do counter register promotion"),
                                 cl::init(false));
cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
    cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number counter promotions per loop to avoid"
             " increasing register pressure too much"));

// A debug option: global cap on promotions across the whole module
// (-1 means unlimited).
cl::opt<int>
    MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
                       cl::desc("Max number of allowed counter promotions"));

cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
    cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             " speculative counter promotion"));

cl::opt<bool> SpeculativeCounterPromotionToLoop(
    cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             " update can be further/iteratively promoted into an acyclic "
             " region."));

cl::opt<bool> IterativeCounterPromotion(
    cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
    cl::desc("Allow counter promotion across the whole loop nest."));

cl::opt<bool> SkipRetExitBlock(
    cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
    cl::desc("Suppress counter promotion if exit blocks contain ret."));

/// Legacy pass-manager wrapper that delegates all work to InstrProfiling.
class InstrProfilingLegacyPass : public ModulePass {
  InstrProfiling InstrProf;

public:
  static char ID;

  InstrProfilingLegacyPass() : ModulePass(ID) {}
  InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
      : ModulePass(ID), InstrProf(Options, IsCS) {
    initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Frontend instrumentation-based coverage lowering";
  }

  bool runOnModule(Module &M) override {
    auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
      return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
    };
    return InstrProf.run(M, GetTLI);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
  }
};

///
/// A helper class to promote one counter RMW operation in the loop
/// into register update.
///
/// RMW update for the counter will be sinked out of the loop after
/// the transformation.
///
class PGOCounterPromoterHelper : public LoadAndStorePromoter {
public:
  PGOCounterPromoterHelper(
      Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
      BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<Instruction *> InsertPts,
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      LoopInfo &LI)
      : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
        InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
    assert(isa<LoadInst>(L));
    assert(isa<StoreInst>(S));
    // Seed the SSA updater with the initial counter value available in the
    // loop preheader.
    SSA.AddAvailableValue(PH, Init);
  }

  // Sink the promoted counter update into each exit block: reload the
  // memory counter, add the register-carried (live-in) value, store back.
  void doExtraRewritesBeforeFinalDeletion() override {
    for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
      BasicBlock *ExitBlock = ExitBlocks[i];
      Instruction *InsertPos = InsertPts[i];
      // Get LiveIn value into the ExitBlock. If there are multiple
      // predecessors, the value is defined by a PHI node in this
      // block.
      Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
      Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
      Type *Ty = LiveInValue->getType();
      IRBuilder<> Builder(InsertPos);
      if (AtomicCounterUpdatePromoted)
        // atomic update currently can only be promoted across the current
        // loop, not the whole loop nest.
        Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
                                MaybeAlign(),
                                AtomicOrdering::SequentiallyConsistent);
      else {
        LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
        auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
        auto *NewStore = Builder.CreateStore(NewVal, Addr);

        // Now update the parent loop's candidate list so the sunken update
        // can itself be promoted out of the enclosing loop:
        if (IterativeCounterPromotion) {
          auto *TargetLoop = LI.getLoopFor(ExitBlock);
          if (TargetLoop)
            LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
        }
      }
    }
  }

private:
  Instruction *Store;
  ArrayRef<BasicBlock *> ExitBlocks;
  ArrayRef<Instruction *> InsertPts;
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  LoopInfo &LI;
};

/// A helper class to do register promotion for all profile counter
/// updates in a loop.
///
class PGOCounterPromoter {
public:
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), L(CurLoop), LI(LI), BFI(BFI) {

    // Skip collection of ExitBlocks and InsertPts for loops that will not be
    // able to have counters promoted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;

    L.getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(&L, LoopExitBlocks))
      return;

    // Deduplicate exit blocks; record one insertion point per unique exit.
    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  // Promote the counter load/store candidates collected for this loop.
  // Returns true if at least one candidate was promoted; promotions are
  // accumulated into *NumPromoted (the module-wide total).
  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;

    // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of incomplete profile -- if the
    // loop is a long running loop and dump is called in the middle
    // of the loop, the result profile is incomplete.
    // FIXME: add other heuristics to detect long running loops.
    if (SkipRetExitBlock) {
      for (auto BB : ExitBlocks)
        if (isa<ReturnInst>(BB->getTerminator()))
          return false;
    }

    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      // NOTE(review): *NumPromoted is bumped after this break, so the
      // promotion that hits the per-loop MaxProm limit is not added to the
      // global total — confirm this asymmetry is intentional.
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }

private:
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Check whether the loop satisfies the basic conditions needed to perform
  // Counter Promotions.
  bool
  isPromotionPossible(Loop *LP,
                      const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
    // We can't insert into a catchswitch.
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return false;

    if (!LP->hasDedicatedExits())
      return false;

    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return false;

    return true;
  }

  // Returns the max number of Counter Promotions for LP.
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(LP, LoopExitBlocks))
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block: budget the promotions against the
    // candidates already pending in each target loop.
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks;
  SmallVector<Instruction *, 8> InsertPts;
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};

enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
  // profiling.
  Default,

  // MemOp: the memop size value profiling.
  MemOp
};

} // end anonymous namespace

PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  if (!run(M, GetTLI))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}

char InstrProfilingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof",
                      "Frontend instrumentation-based coverage lowering.",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof",
                    "Frontend instrumentation-based coverage lowering.", false,
                    false)

ModulePass *
llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
                                     bool IsCS) {
  return new InstrProfilingLegacyPass(Options, IsCS);
}

// Lower every instrprof_* intrinsic in F. Returns true if anything was
// lowered; in that case counter promotion is attempted afterwards.
bool InstrProfiling::lowerIntrinsics(Function *F) {
  bool MadeChange = false;
  PromotionCandidates.clear();
  for (BasicBlock &BB : *F) {
    // early_inc_range: lowering erases the visited instruction.
    for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
      if (auto *IPIS = dyn_cast<InstrProfIncrementInstStep>(&Instr)) {
        lowerIncrement(IPIS);
        MadeChange = true;
      } else if (auto *IPI = dyn_cast<InstrProfIncrementInst>(&Instr)) {
        lowerIncrement(IPI);
        MadeChange = true;
      } else if (auto *IPC = dyn_cast<InstrProfCoverInst>(&Instr)) {
        lowerCover(IPC);
        MadeChange = true;
      } else if (auto *IPVP = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
        lowerValueProfileInst(IPVP);
        MadeChange = true;
      }
    }
  }

  if (!MadeChange)
    return false;

  promoteCounterLoadStores(F);
  return true;
}

bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // Mach-O doesn't support weak external references.
  if (TT.isOSBinFormatMachO())
    return false;

  // An explicit command-line setting wins.
  if (RuntimeCounterRelocation.getNumOccurrences() > 0)
    return RuntimeCounterRelocation;

  // Fuchsia uses runtime counter relocation by default.
  return TT.isOSFuchsia();
}

bool InstrProfiling::isCounterPromotionEnabled() const {
  // An explicit command-line setting overrides the pass options.
  if (DoCounterPromotion.getNumOccurrences() > 0)
    return DoCounterPromotion;

  return Options.DoCounterPromotion;
}

// Group the counter load/store pairs recorded by lowerIncrement by their
// innermost enclosing loop, then run the promoter over the loop nest.
void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}

static bool needsRuntimeHookUnconditionally(const Triple &TT) {
  // On Fuchsia, we only need runtime hook if any counters are present.
  if (TT.isOSFuchsia())
    return false;

  return true;
}

/// Check if the module contains uses of any profiling intrinsics.
static bool containsProfilingIntrinsics(Module &M) {
  // A declared intrinsic with no uses does not count as "contained".
  auto containsIntrinsic = [&](int ID) {
    if (auto *F = M.getFunction(Intrinsic::getName(ID)))
      return !F->use_empty();
    return false;
  };
  return containsIntrinsic(llvm::Intrinsic::instrprof_cover) ||
         containsIntrinsic(llvm::Intrinsic::instrprof_increment) ||
         containsIntrinsic(llvm::Intrinsic::instrprof_increment_step) ||
         containsIntrinsic(llvm::Intrinsic::instrprof_value_profile);
}

// Main entry point: lower all profiling intrinsics in M and emit the
// supporting data (names, value nodes, registration, runtime hook,
// initialization). Returns true if the module was modified.
bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  // Reset per-module state; the pass object may be reused across modules.
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  CompilerUsedVars.clear();
  UsedVars.clear();
  TT = Triple(M.getTargetTriple());

  bool MadeChange = false;
  bool NeedsRuntimeHook = needsRuntimeHookUnconditionally(TT);
  if (NeedsRuntimeHook)
    MadeChange = emitRuntimeHook();

  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  // Improve compile time by avoiding linear scans when there is no work.
  // When coverage is enabled on code that is eliminated by the front-end,
  // e.g. unused functions with internal linkage, and the target does not
  // require pulling in profile runtime, there is no need to do further work.
  if (!containsProfilingIntrinsics(M) &&
      (!CoverageNamesVar || !NeedsRuntimeHook)) {
    return MadeChange;
  }

  // We did not know how many value sites there would be inside
  // the instrumented function. This is counting the number of instrumented
  // target value sites to enter it as field in the profile data variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires per-function profile data
    // variable to be created first.
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  emitVNodes();
  emitNameData();
  emitRuntimeHook();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}

// Declare (or find) the runtime value-profiling callback for the given call
// type, attaching the target's i32-parameter extension attribute to the
// counter-index parameter.
static FunctionCallee getOrInsertValueProfilingCall(
    Module &M, const TargetLibraryInfo &TLI,
    ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  assert((CallType == ValueProfilingCallType::Default ||
          CallType == ValueProfilingCallType::MemOp) &&
         "Must be Default or MemOp");
  // Parameter types come from the shared InstrProfData.inc definition so
  // they stay in sync with the runtime.
  Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *ValueProfilingCallTy =
      FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
  StringRef FuncName = CallType == ValueProfilingCallType::Default
                           ? getInstrProfValueProfFuncName()
                           : getInstrProfValueProfMemOpFuncName();
  return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
}

// Track, per name variable and per value kind, the number of value sites
// (max site index + 1) seen so far for this function.
void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  auto &PD = ProfileDataMap[Name];
  PD.NumValueSites[ValueKind] =
      std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
}

// Replace an instrprof_value_profile intrinsic with a call into the profile
// runtime, passing the per-function data variable and a flattened site index.
void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
  // TODO: Value profiling heavily depends on the data section which is omitted
  // in lightweight mode. We need to move the value profile pointer to the
  // Counter struct to get this working.
  assert(
      !DebugInfoCorrelate &&
      "Value profiling is not yet supported with lightweight instrumentation");
  GlobalVariable *Name = Ind->getName();
  auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter incerement");

  GlobalVariable *DataVar = It->second.DataVar;
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  // Flatten (kind, index) into a single site index: offset by the site
  // counts of all lower-numbered kinds.
  for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
    Index += It->second.NumValueSites[Kind];

  IRBuilder<> Builder(Ind);
  bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
                      llvm::InstrProfValueKind::IPVK_MemOPSize);
  CallInst *Call = nullptr;
  auto *TLI = &GetTLI(*Ind->getFunction());

  // To support value profiling calls within Windows exception handlers, funclet
  // information contained within operand bundles needs to be copied over to
  // the library call. This is required for the IR to be processed by the
  // WinEHPrepare pass.
  SmallVector<OperandBundleDef, 1> OpBundles;
  Ind->getOperandBundlesAsDefs(OpBundles);
  if (!IsMemOpSize) {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
                              OpBundles);
  } else {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(
        getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
        Args, OpBundles);
  }
  if (auto AK = TLI->getExtAttrForI32Param(false))
    Call->addParamAttr(2, AK);
  Ind->replaceAllUsesWith(Call);
  Ind->eraseFromParent();
}

// Compute the address of this instruction's counter slot. With runtime
// counter relocation enabled, the address is offset by a bias value that is
// loaded once in the function's entry block.
Value *InstrProfiling::getCounterAddress(InstrProfInstBase *I) {
  auto *Counters = getOrCreateRegionCounters(I);
  IRBuilder<> Builder(I);

  auto *Addr = Builder.CreateConstInBoundsGEP2_32(
      Counters->getValueType(), Counters, 0, I->getIndex()->getZExtValue());

  if (!isRuntimeCounterRelocationEnabled())
    return Addr;

  Type *Int64Ty = Type::getInt64Ty(M->getContext());
  Function *Fn = I->getParent()->getParent();
  // Reuse the bias load if it is already the first instruction in the entry
  // block; otherwise create the bias global (if needed) and a fresh load.
  Instruction &EntryI = Fn->getEntryBlock().front();
  LoadInst *LI = dyn_cast<LoadInst>(&EntryI);
  if (!LI) {
    IRBuilder<> EntryBuilder(&EntryI);
    auto *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
    if (!Bias) {
      // Compiler must define this variable when runtime counter relocation
      // is being used. Runtime has a weak external reference that is used
      // to check whether that's the case or not.
      Bias = new GlobalVariable(
          *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
          Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
      Bias->setVisibility(GlobalVariable::HiddenVisibility);
      // A definition that's weak (linkonce_odr) without being in a COMDAT
      // section wouldn't lead to link errors, but it would lead to a dead
      // data word from every TU but one. Putting it in COMDAT ensures there
      // will be exactly one data slot in the link.
      if (TT.supportsCOMDAT())
        Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
    }
    LI = EntryBuilder.CreateLoad(Int64Ty, Bias);
  }
  auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
  return Builder.CreateIntToPtr(Add, Addr->getType());
}

// Lower instrprof_cover to a single byte store into the coverage array.
void InstrProfiling::lowerCover(InstrProfCoverInst *CoverInstruction) {
  auto *Addr = getCounterAddress(CoverInstruction);
  IRBuilder<> Builder(CoverInstruction);
  // We store zero to represent that this block is covered.
  Builder.CreateStore(Builder.getInt8(0), Addr);
  CoverInstruction->eraseFromParent();
}

// Lower instrprof_increment[_step] to either an atomic fetch-add or a plain
// load/add/store; the latter pair is recorded as a promotion candidate.
void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  auto *Addr = getCounterAddress(Inc);

  IRBuilder<> Builder(Inc);
  if (Options.Atomic || AtomicCounterUpdateAll ||
      (Inc->getIndex()->isZeroValue() && AtomicFirstCounter)) {
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            MaybeAlign(), AtomicOrdering::Monotonic);
  } else {
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}

// Collect the name globals referenced by the "coverage unused names" array,
// make them private, and delete the (now redundant) array itself.
void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
  ConstantArray *Names =
      cast<ConstantArray>(CoverageNamesVar->getInitializer());
  for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
    Constant *NC = Names->getOperand(I);
    Value *V = NC->stripPointerCasts();
    assert(isa<GlobalVariable>(V) && "Missing reference to function name");
    GlobalVariable *Name = cast<GlobalVariable>(V);

    Name->setLinkage(GlobalValue::PrivateLinkage);
    ReferencedNames.push_back(Name);
    if (isa<ConstantExpr>(NC))
      NC->dropAllReferences();
  }
  CoverageNamesVar->eraseFromParent();
}

/// Get the name of a profiling variable for a particular function.
static std::string getVarName(InstrProfInstBase *Inc, StringRef Prefix,
                              bool &Renamed) {
  // Strip the name-variable prefix to recover the raw function name.
  StringRef NamePrefix = getInstrProfNameVarPrefix();
  StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
  Function *F = Inc->getParent()->getParent();
  Module *M = F->getParent();
  if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
      !canRenameComdatFunc(*F)) {
    Renamed = false;
    return (Prefix + Name).str();
  }
  // Append the CFG hash so differently-hashed comdat copies don't collide,
  // unless the name already carries that suffix.
  Renamed = true;
  uint64_t FuncHash = Inc->getHash()->getZExtValue();
  SmallVector<char, 24> HashPostfix;
  if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
    return (Prefix + Name).str();
  return (Prefix + Name + "." + Twine(FuncHash)).str();
}

// Read an integer module flag; absent (or non-integer) flags read as zero.
static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
  auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
  if (!MD)
    return 0;

  // If the flag is a ConstantAsMetadata, it should be an integer representable
  // in 64-bits.
  return cast<ConstantInt>(MD->getValue())->getZExtValue();
}

static bool enablesValueProfiling(const Module &M) {
  return isIRPGOFlagSet(&M) ||
         getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
}

// Conservatively returns true if data variables may be referenced by code.
static bool profDataReferencedByCode(const Module &M) {
  return enablesValueProfiling(M);
}

static inline bool shouldRecordFunctionAddr(Function *F) {
  // Only record function addresses if IR PGO is enabled or if clang value
  // profiling is enabled. Recording function addresses greatly increases object
  // file size, because it prevents the inliner from deleting functions that
  // have been inlined everywhere.
  if (!profDataReferencedByCode(*F->getParent()))
    return false;

  // Check the linkage
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // COMDAT. This avoids the profile data variable referencing internal symbols
  // in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkOnceODR linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with missing address may be picked by the linker leading to missing
  // indirect call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}

static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin. compiler-rt uses linker magic.
  if (TT.isOSDarwin())
    return false;
  // Use linker script magic to get data/cnts/name start/end.
  if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
      TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4() || TT.isOSWindows())
    return false;

  return true;
}

// Create the counter array global for Inc: an all-ones i8 array for
// instrprof_cover (a zero byte later marks "covered"), otherwise a
// zero-initialized i64 array for counters.
GlobalVariable *
InstrProfiling::createRegionCounters(InstrProfInstBase *Inc, StringRef Name,
                                     GlobalValue::LinkageTypes Linkage) {
  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  auto &Ctx = M->getContext();
  GlobalVariable *GV;
  if (isa<InstrProfCoverInst>(Inc)) {
    auto *CounterTy = Type::getInt8Ty(Ctx);
    auto *CounterArrTy = ArrayType::get(CounterTy, NumCounters);
    // TODO: `Constant::getAllOnesValue()` does not yet accept an array type.
    std::vector<Constant *> InitialValues(NumCounters,
                                          Constant::getAllOnesValue(CounterTy));
    GV = new GlobalVariable(*M, CounterArrTy, false, Linkage,
                            ConstantArray::get(CounterArrTy, InitialValues),
                            Name);
    GV->setAlignment(Align(1));
  } else {
    auto *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);
    GV = new GlobalVariable(*M, CounterTy, false, Linkage,
                            Constant::getNullValue(CounterTy), Name);
    GV->setAlignment(Align(8));
  }
  return GV;
}

GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfInstBase *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  auto &PD = ProfileDataMap[NamePtr];
  if (PD.RegionCounters)
    return PD.RegionCounters;

  // Match the linkage and visibility of the name global.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();

  // Use internal rather than private linkage so the counter variable shows up
  // in the symbol table when using debug info for correlation.
  if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
      Linkage == GlobalValue::PrivateLinkage)
    Linkage = GlobalValue::InternalLinkage;

  // Due to the limitation of binder as of 2021/09/28, the duplicate weak
  // symbols in the same csect won't be discarded. When there are duplicate weak
  // symbols, we can NOT guarantee that the relocations get resolved to the
  // intended weak symbol, so we can not ensure the correctness of the relative
  // CounterPtr, so we have to use private linkage for counter and data symbols.
  if (TT.isOSBinFormatXCOFF()) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of counters of the COMDAT function will be emitted after linking. Keep
  // in mind that this pass may run before the inliner, so we need to create a
  // new comdat group for the counters and profiling data. If we use the comdat
  // of the parent function, that will result in relocations against discarded
  // sections.
  //
  // If the data variable is referenced by code, counters and data have to be
  // in different comdats for COFF because the Visual C++ linker will report
  // duplicate symbol errors if there are multiple external symbols with the
  // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
  //
  // For ELF, when not using COMDAT, put counters, data and values into a
  // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
  // allows -z start-stop-gc to discard the entire group when the function is
  // discarded.
  bool DataReferencedByCode = profDataReferencedByCode(*M);
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  bool Renamed;
  std::string CntsVarName =
      getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
  std::string DataVarName =
      getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
  // Helper applied to each emitted global: places it in the appropriate
  // comdat group per the policy described in the comment above.
  auto MaybeSetComdat = [&](GlobalVariable *GV) {
    bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
    if (UseComdat) {
      StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
                                ? GV->getName()
                                : CntsVarName;
      Comdat *C = M->getOrInsertComdat(GroupName);
      if (!NeedComdat)
        C->setSelectionKind(Comdat::NoDeduplicate);
      GV->setComdat(C);
    }
  };

  // NOTE: NumCounters is consumed by the INSTR_PROF_DATA initializer
  // expressions expanded from InstrProfData.inc further below.
  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();

  auto *CounterPtr = createRegionCounters(Inc, CntsVarName, Linkage);
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);
  PD.RegionCounters = CounterPtr;
  if (DebugInfoCorrelate) {
    // Attach the function name, CFG hash, and counter count as annotations on
    // a debug-info global expression so a profile can later be correlated
    // against the binary's debug info instead of in-binary data sections.
    if (auto *SP = Fn->getSubprogram()) {
      DIBuilder DB(*M, true, SP->getUnit());
      Metadata *FunctionNameAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
          MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
      };
      Metadata *CFGHashAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
          ConstantAsMetadata::get(Inc->getHash()),
      };
      Metadata *NumCountersAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
          ConstantAsMetadata::get(Inc->getNumCounters()),
      };
      auto Annotations = DB.getOrCreateArray({
          MDNode::get(Ctx, FunctionNameAnnotation),
          MDNode::get(Ctx, CFGHashAnnotation),
          MDNode::get(Ctx, NumCountersAnnotation),
      });
      auto *DICounter = DB.createGlobalVariableExpression(
          SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
          /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
          CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
          /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
          Annotations);
      CounterPtr->addDebugInfo(DICounter);
      DB.finalize();
    } else {
      // Without a subprogram there is nothing to hang the annotations on;
      // warn that this function cannot be correlated.
      std::string Msg = ("Missing debug info for function " + Fn->getName() +
                         "; required for profile correlation.")
                            .str();
      Ctx.diagnose(
          DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
    }
  }

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Allocate statically the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  uint64_t NS = 0;
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    NS += PD.NumValueSites[Kind];
  if (NS > 0 && ValueProfileStaticAlloc &&
      !needsRuntimeRegistrationOfSectionRange(TT)) {
    ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
    auto *ValuesVar = new GlobalVariable(
        *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
        getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
    ValuesVar->setVisibility(Visibility);
    ValuesVar->setSection(
        getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
    ValuesVar->setAlignment(Align(8));
    MaybeSetComdat(ValuesVar);
    ValuesPtrExpr =
        ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
  }

  if (DebugInfoCorrelate) {
    // With debug-info correlation there is no data variable to emit.
    // Mark the counter variable as used so that it isn't optimized out.
    CompilerUsedVars.push_back(PD.RegionCounters);
    return PD.RegionCounters;
  }

  // Create data variable.
  auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  // If the data variable is not referenced by code (if we don't emit
  // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
  // data variable live under linker GC, the data variable can be private. This
  // optimization applies to ELF.
  //
  // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
  // to be false.
  //
  // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
  // that other copies must have the same CFG and cannot have value profiling.
  // If no hash suffix, other profd copies may be referenced by code.
  if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
      (TT.isOSBinFormatELF() ||
       (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  // The initializer is set below, after RelativeCounterPtr is computed (it
  // needs the Data variable's own address).
  auto *Data =
      new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
  // Reference the counter variable with a label difference (link-time
  // constant).
  auto *RelativeCounterPtr =
      ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
                           ConstantExpr::getPtrToInt(Data, IntPtrTy));

  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  Data->setInitializer(ConstantStruct::get(DataTy, DataVals));

  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  PD.DataVar = Data;

  // Mark the data variable as used so that it isn't stripped out.
  CompilerUsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset Name variable's linkage and visibility to private so that
  // it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return PD.RegionCounters;
}

// Emits one module-level pool of value-profile nodes shared by all
// instrumented functions, sized from the total number of value sites.
void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Sum value sites of every kind across all instrumented functions.
  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
  // Heuristic for small programs with very few total value sites.
  // The default value of vp-counters-per-site is chosen based on
  // the observation that large apps usually have a low percentage
  // of value sites that actually have any profile data, and thus
  // the average number of counters per site is low. For small
  // apps with very few sites, this may not be true. Bump up the
  // number of counters in this case.
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  // VNodesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
1148 UsedVars.push_back(VNodesVar); 1149 } 1150 1151 void InstrProfiling::emitNameData() { 1152 std::string UncompressedData; 1153 1154 if (ReferencedNames.empty()) 1155 return; 1156 1157 std::string CompressedNameStr; 1158 if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr, 1159 DoInstrProfNameCompression)) { 1160 report_fatal_error(Twine(toString(std::move(E))), false); 1161 } 1162 1163 auto &Ctx = M->getContext(); 1164 auto *NamesVal = 1165 ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false); 1166 NamesVar = new GlobalVariable(*M, NamesVal->getType(), true, 1167 GlobalValue::PrivateLinkage, NamesVal, 1168 getInstrProfNamesVarName()); 1169 NamesSize = CompressedNameStr.size(); 1170 NamesVar->setSection( 1171 getInstrProfSectionName(IPSK_name, TT.getObjectFormat())); 1172 // On COFF, it's important to reduce the alignment down to 1 to prevent the 1173 // linker from inserting padding before the start of the names section or 1174 // between names entries. 1175 NamesVar->setAlignment(Align(1)); 1176 // NamesVar is used by runtime but not referenced via relocation by other 1177 // sections. Conservatively make it linker retained. 1178 UsedVars.push_back(NamesVar); 1179 1180 for (auto *NamePtr : ReferencedNames) 1181 NamePtr->eraseFromParent(); 1182 } 1183 1184 void InstrProfiling::emitRegistration() { 1185 if (!needsRuntimeRegistrationOfSectionRange(TT)) 1186 return; 1187 1188 // Construct the function. 
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
  auto *Int64Ty = Type::getInt64Ty(M->getContext());
  auto *RegisterFTy = FunctionType::get(VoidTy, false);
  auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
                                     getInstrProfRegFuncsName(), M);
  RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  if (Options.NoRedZone)
    RegisterF->addFnAttr(Attribute::NoRedZone);

  // Declaration of the runtime's per-variable registration entry point.
  auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
  auto *RuntimeRegisterF =
      Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
                       getInstrProfRegFuncName(), M);

  // Register every retained profiling global (functions are skipped; NamesVar
  // gets its own dedicated registration call below).
  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
  for (Value *Data : CompilerUsedVars)
    if (!isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
  for (Value *Data : UsedVars)
    if (Data != NamesVar && !isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));

  if (NamesVar) {
    // The names blob is registered with its size, via a separate entry point.
    Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
    auto *NamesRegisterTy =
        FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
    auto *NamesRegisterF =
        Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
                         getInstrProfNamesRegFuncName(), M);
    IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
                                    IRB.getInt64(NamesSize)});
  }

  IRB.CreateRetVoid();
}

// Emits the external "runtime hook" variable whose reference forces the
// profiling runtime to be linked in. Returns true if the hook was created.
bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with -u<hook_var> flag for Linux
  // in which case there is no need to emit the external variable.
  if (TT.isOSLinux())
    return false;

  // If the module's provided its own runtime, we don't need to do anything.
  if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
    return false;

  // Declare an external variable that will pull in the runtime initialization.
  auto *Int32Ty = Type::getInt32Ty(M->getContext());
  auto *Var =
      new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
                         nullptr, getInstrProfRuntimeHookVarName());

  if (TT.isOSBinFormatELF()) {
    // Mark the user variable as used so that it isn't stripped out.
    CompilerUsedVars.push_back(Var);
  } else {
    // Make a function that uses it.
    auto *User = Function::Create(FunctionType::get(Int32Ty, false),
                                  GlobalValue::LinkOnceODRLinkage,
                                  getInstrProfRuntimeHookVarUseFuncName(), M);
    User->addFnAttr(Attribute::NoInline);
    if (Options.NoRedZone)
      User->addFnAttr(Attribute::NoRedZone);
    User->setVisibility(GlobalValue::HiddenVisibility);
    if (TT.supportsCOMDAT())
      User->setComdat(M->getOrInsertComdat(User->getName()));

    // The function body simply loads and returns the hook variable, creating
    // the reference that drags the runtime in.
    IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
    auto *Load = IRB.CreateLoad(Int32Ty, Var);
    IRB.CreateRet(Load);

    // Mark the function as used so that it isn't stripped out.
    CompilerUsedVars.push_back(User);
  }
  return true;
}

// Places the emitted profiling globals into llvm.compiler.used or llvm.used,
// depending on how strongly the target's linker preserves them.
void InstrProfiling::emitUses() {
  // The metadata sections are parallel arrays. Optimizers (e.g.
  // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
  // we conservatively retain all unconditionally in the compiler.
  //
  // On ELF and Mach-O, the linker can guarantee the associated sections will be
  // retained or discarded as a unit, so llvm.compiler.used is sufficient.
  // Similarly on COFF, if prof data is not referenced by code we use one comdat
  // and ensure this GC property as well. Otherwise, we have to conservatively
  // make all of the sections retained by the linker.
  if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
      (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
    appendToCompilerUsed(*M, CompilerUsedVars);
  else
    appendToUsed(*M, CompilerUsedVars);

  // We do not add proper references from used metadata sections to NamesVar and
  // VNodesVar, so we have to be conservative and place them in llvm.used
  // regardless of the target.
  appendToUsed(*M, UsedVars);
}

// Emits the module initializer that calls the registration function (if one
// was emitted) from a global constructor.
void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: that lowering runs after
  // LTO/ThinLTO linking, and pass PGOInstrumentationGenCreateVar should
  // have already created the variable before LTO/ThinLTO linking.
  if (!IsCS)
    createProfileFileNameVar(*M, Options.InstrProfileOutput);
  Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
  if (!RegisterF)
    return;

  // Create the initialization function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *F = Function::Create(FunctionType::get(VoidTy, false),
                             GlobalValue::InternalLinkage,
                             getInstrProfInitFuncName(), M);
  F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  F->addFnAttr(Attribute::NoInline);
  if (Options.NoRedZone)
    F->addFnAttr(Attribute::NoRedZone);

  // Add the basic block and the necessary calls.
  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
  IRB.CreateCall(RegisterF, {});
  IRB.CreateRetVoid();

  // Run at constructor priority 0 so profiling is set up before user ctors.
  appendToGlobalCtors(*M, F, 0);
}