//===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass lowers instrprof_* intrinsics emitted by a frontend for profiling.
// It also builds the data structures and initialization code needed for
// updating execution counts and emitting the profile at runtime.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation/InstrProfiling.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/ProfileData/InstrProfCorrelator.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Error.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/SSAUpdater.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <string>

using namespace llvm;

#define DEBUG_TYPE "instrprof"

namespace llvm {
cl::opt<bool>
    DebugInfoCorrelate("debug-info-correlate", cl::ZeroOrMore,
                       cl::desc("Use debug info to correlate profiles."),
                       cl::init(false));
} // namespace llvm

namespace {

cl::opt<bool> DoHashBasedCounterSplit(
    "hash-based-counter-split",
    cl::desc("Rename counter variable of a comdat function based on cfg hash"),
    cl::init(true));

cl::opt<bool>
    RuntimeCounterRelocation("runtime-counter-relocation",
                             cl::desc("Enable relocating counters at runtime."),
                             cl::init(false));

cl::opt<bool> ValueProfileStaticAlloc(
    "vp-static-alloc",
    cl::desc("Do static counter allocation for value profiler"),
    cl::init(true));

cl::opt<double> NumCountersPerValueSite(
    "vp-counters-per-site",
    cl::desc("The average number of profile counters allocated "
             "per value profiling site."),
    // This is set to a very small value because in real programs, only
    // a very small percentage of value sites have non-zero targets, e.g., 1/30.
    // For those sites with non-zero profile, the average number of targets
    // is usually smaller than 2.
    cl::init(1.0));

cl::opt<bool> AtomicCounterUpdateAll(
    "instrprof-atomic-counter-update-all", cl::ZeroOrMore,
    cl::desc("Make all profile counter updates atomic (for testing only)"),
    cl::init(false));

cl::opt<bool> AtomicCounterUpdatePromoted(
    "atomic-counter-update-promoted", cl::ZeroOrMore,
    cl::desc("Do counter update using atomic fetch add "
             " for promoted counters only"),
    cl::init(false));

cl::opt<bool> AtomicFirstCounter(
    "atomic-first-counter", cl::ZeroOrMore,
    cl::desc("Use atomic fetch add for first counter in a function (usually "
             "the entry counter)"),
    cl::init(false));

// If the option is not specified, whether counter promotion is done by
// default depends on how the instrumentation lowering pipeline is set up,
// i.e., the default value of true for this option does not mean the
// promotion will be done by default. Explicitly setting this option can
// override the default behavior.
cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore,
                                 cl::desc("Do counter register promotion"),
                                 cl::init(false));
cl::opt<unsigned> MaxNumOfPromotionsPerLoop(
    cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20),
    cl::desc("Max number counter promotions per loop to avoid"
             " increasing register pressure too much"));

// A debug option
cl::opt<int>
    MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1),
                       cl::desc("Max number of allowed counter promotions"));

cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting(
    cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3),
    cl::desc("The max number of exiting blocks of a loop to allow "
             " speculative counter promotion"));

cl::opt<bool> SpeculativeCounterPromotionToLoop(
    cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false),
    cl::desc("When the option is false, if the target block is in a loop, "
             "the promotion will be disallowed unless the promoted counter "
             " update can be further/iteratively promoted into an acyclic "
             " region."));

cl::opt<bool> IterativeCounterPromotion(
    cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true),
    cl::desc("Allow counter promotion across the whole loop nest."));

cl::opt<bool> SkipRetExitBlock(
    cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true),
    cl::desc("Suppress counter promotion if exit blocks contain ret."));

class InstrProfilingLegacyPass : public ModulePass {
  InstrProfiling InstrProf;

public:
  static char ID;

  InstrProfilingLegacyPass() : ModulePass(ID) {}
  InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false)
      : ModulePass(ID), InstrProf(Options, IsCS) {
    initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override {
    return "Frontend instrumentation-based coverage lowering";
  }

  bool runOnModule(Module &M) override {
    auto GetTLI = [this](Function &F) -> TargetLibraryInfo & {
      return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
    };
    return InstrProf.run(M, GetTLI);
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    AU.addRequired<TargetLibraryInfoWrapperPass>();
  }
};

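// Illustrative sketch of the transformation (hand-written, not actual emitted
// IR), using @__profc_foo as a placeholder counter variable: a per-iteration
// counter update such as
//
//   loop:
//     %c = load i64, i64* @__profc_foo
//     %n = add i64 %c, 1
//     store i64 %n, i64* @__profc_foo
//
// is rewritten to keep the running count in an SSA value inside the loop,
// with a single merged update (load + add + store, or an atomic add when
// -atomic-counter-update-promoted is set) sunk into each exit block. With
// iterative promotion, that sunk update can in turn be promoted out of the
// enclosing loop.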
///
/// A helper class to promote one counter RMW operation in the loop
/// into register update.
///
/// The RMW update for the counter will be sunk out of the loop after
/// the transformation.
///
class PGOCounterPromoterHelper : public LoadAndStorePromoter {
public:
  PGOCounterPromoterHelper(
      Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
      BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<Instruction *> InsertPts,
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      LoopInfo &LI)
      : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
        InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
    assert(isa<LoadInst>(L));
    assert(isa<StoreInst>(S));
    SSA.AddAvailableValue(PH, Init);
  }

  void doExtraRewritesBeforeFinalDeletion() override {
    for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
      BasicBlock *ExitBlock = ExitBlocks[i];
      Instruction *InsertPos = InsertPts[i];
      // Get the live-in value into the ExitBlock. If there are multiple
      // predecessors, the value is defined by a PHI node in this
      // block.
      Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
      Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
      Type *Ty = LiveInValue->getType();
      IRBuilder<> Builder(InsertPos);
      if (AtomicCounterUpdatePromoted)
        // Atomic updates currently can only be promoted across the current
        // loop, not the whole loop nest.
        Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
                                MaybeAlign(),
                                AtomicOrdering::SequentiallyConsistent);
      else {
        LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
        auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
        auto *NewStore = Builder.CreateStore(NewVal, Addr);

        // Now update the parent loop's candidate list:
        if (IterativeCounterPromotion) {
          auto *TargetLoop = LI.getLoopFor(ExitBlock);
          if (TargetLoop)
            LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
        }
      }
    }
  }

private:
  Instruction *Store;
  ArrayRef<BasicBlock *> ExitBlocks;
  ArrayRef<Instruction *> InsertPts;
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  LoopInfo &LI;
};

/// A helper class to do register promotion for all profile counter
/// updates in a loop.
///
class PGOCounterPromoter {
public:
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
        LI(LI), BFI(BFI) {

    // Skip collection of ExitBlocks and InsertPts for loops that will not be
    // able to have counters promoted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;

    L.getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(&L, LoopExitBlocks))
      return;

    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;

    // Skip if any of the ExitBlocks contains a ret instruction.
    // This is to prevent dumping of incomplete profile -- if the
    // loop is a long-running loop and dump is called in the middle
    // of the loop, the resulting profile is incomplete.
    // FIXME: add other heuristics to detect long running loops.
    if (SkipRetExitBlock) {
      for (auto BB : ExitBlocks)
        if (isa<ReturnInst>(BB->getTerminator()))
          return false;
    }

    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }

private:
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Check whether the loop satisfies the basic conditions needed to perform
  // Counter Promotions.
  bool
  isPromotionPossible(Loop *LP,
                      const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) {
    // We can't insert into a catchswitch.
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return false;

    if (!LP->hasDedicatedExits())
      return false;

    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return false;

    return true;
  }

  // Returns the max number of Counter Promotions for LP.
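  // A hand-worked example of the budget computation below (assuming the
  // default -max-counter-promotions-per-loop=20): if an exit block sits in a
  // target loop that already has 5 pending candidates, this loop may sink at
  // most max(20, 5) - 5 = 15 additional counter updates into it.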
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (!isPromotionPossible(LP, LoopExitBlocks))
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block:
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks;
  SmallVector<Instruction *, 8> InsertPts;
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};

enum class ValueProfilingCallType {
  // Individual values are tracked. Currently used for indirect call target
  // profiling.
  Default,

  // MemOp: the memop size value profiling.
  MemOp
};

} // end anonymous namespace

PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  if (!run(M, GetTLI))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}

char InstrProfilingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof",
                      "Frontend instrumentation-based coverage lowering.",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof",
                    "Frontend instrumentation-based coverage lowering.", false,
                    false)

ModulePass *
llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
                                     bool IsCS) {
  return new InstrProfilingLegacyPass(Options, IsCS);
}

static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
  InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
  if (Inc)
    return Inc;
  return dyn_cast<InstrProfIncrementInst>(Instr);
}

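// Rough shape of the lowering (hand-written sketch, not actual emitted IR):
// a frontend-emitted
//
//   call void @llvm.instrprof.increment(i8* <name>, i64 <hash>, i32 <N>, i32 <idx>)
//
// becomes a plain load/add/store (or an atomic add) of element <idx> of the
// per-function counter array created by getOrCreateRegionCounters(), e.g.:
//
//   %c = load i64, i64* getelementptr inbounds ([<N> x i64], [<N> x i64]* @__profc_foo, i32 0, i32 <idx>)
//   %n = add i64 %c, 1
//   store i64 %n, i64* getelementptr inbounds (...)
//
// @__profc_foo is an illustrative counter variable name.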
bool InstrProfiling::lowerIntrinsics(Function *F) {
  bool MadeChange = false;
  PromotionCandidates.clear();
  for (BasicBlock &BB : *F) {
    for (Instruction &Instr : llvm::make_early_inc_range(BB)) {
      InstrProfIncrementInst *Inc = castToIncrementInst(&Instr);
      if (Inc) {
        lowerIncrement(Inc);
        MadeChange = true;
      } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(&Instr)) {
        lowerValueProfileInst(Ind);
        MadeChange = true;
      }
    }
  }

  if (!MadeChange)
    return false;

  promoteCounterLoadStores(F);
  return true;
}

bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // Mach-O doesn't support weak external references.
  if (TT.isOSBinFormatMachO())
    return false;

  if (RuntimeCounterRelocation.getNumOccurrences() > 0)
    return RuntimeCounterRelocation;

  // Fuchsia uses runtime counter relocation by default.
  return TT.isOSFuchsia();
}

bool InstrProfiling::isCounterPromotionEnabled() const {
  if (DoCounterPromotion.getNumOccurrences() > 0)
    return DoCounterPromotion;

  return Options.DoCounterPromotion;
}

void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}

static bool needsRuntimeHookUnconditionally(const Triple &TT) {
  // On Fuchsia, we only need the runtime hook if any counters are present.
  if (TT.isOSFuchsia())
    return false;

  return true;
}

/// Check if the module contains uses of any profiling intrinsics.
static bool containsProfilingIntrinsics(Module &M) {
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
    if (!F->use_empty())
      return true;
  return false;
}

bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  CompilerUsedVars.clear();
  UsedVars.clear();
  TT = Triple(M.getTargetTriple());

  bool MadeChange = false;

  // Emit the runtime hook even if no counters are present.
  if (needsRuntimeHookUnconditionally(TT))
    MadeChange = emitRuntimeHook();

  // Improve compile time by avoiding linear scans when there is no work.
  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
    return MadeChange;

  // We did not know how many value sites there would be inside
  // the instrumented function. This counts the number of instrumented
  // target value sites so it can be recorded as a field in the profile data
  // variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires the per-function profile
    // data variable to be created first.
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  emitVNodes();
  emitNameData();
  emitRuntimeHook();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}

static FunctionCallee getOrInsertValueProfilingCall(
    Module &M, const TargetLibraryInfo &TLI,
    ValueProfilingCallType CallType = ValueProfilingCallType::Default) {
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  assert((CallType == ValueProfilingCallType::Default ||
          CallType == ValueProfilingCallType::MemOp) &&
         "Must be Default or MemOp");
  Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *ValueProfilingCallTy =
      FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
  StringRef FuncName = CallType == ValueProfilingCallType::Default
                           ? getInstrProfValueProfFuncName()
                           : getInstrProfValueProfMemOpFuncName();
  return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL);
}

void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  auto &PD = ProfileDataMap[Name];
  PD.NumValueSites[ValueKind] =
      std::max(PD.NumValueSites[ValueKind], (uint32_t)(Index + 1));
}

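// Rough shape of the lowering (hand-written sketch): a frontend-emitted
//
//   call void @llvm.instrprof.value.profile(i8* <name>, i64 <hash>,
//                                           i64 <value>, i32 <kind>, i32 <idx>)
//
// becomes a call into the profiling runtime -- __llvm_profile_instrument_target
// for ordinary value profiling, or __llvm_profile_instrument_memop for memop
// size profiling -- passing the value, the per-function __profd_* data
// variable, and the flattened site index.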
void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
  // TODO: Value profiling heavily depends on the data section which is omitted
  // in lightweight mode. We need to move the value profile pointer to the
  // Counter struct to get this working.
  assert(
      !DebugInfoCorrelate &&
      "Value profiling is not yet supported with lightweight instrumentation");
  GlobalVariable *Name = Ind->getName();
  auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter increment");

  GlobalVariable *DataVar = It->second.DataVar;
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
    Index += It->second.NumValueSites[Kind];

  IRBuilder<> Builder(Ind);
  bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() ==
                      llvm::InstrProfValueKind::IPVK_MemOPSize);
  CallInst *Call = nullptr;
  auto *TLI = &GetTLI(*Ind->getFunction());

  // To support value profiling calls within Windows exception handlers, funclet
  // information contained within operand bundles needs to be copied over to
  // the library call. This is required for the IR to be processed by the
  // WinEHPrepare pass.
  SmallVector<OperandBundleDef, 1> OpBundles;
  Ind->getOperandBundlesAsDefs(OpBundles);
  if (!IsMemOpSize) {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args,
                              OpBundles);
  } else {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(
        getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp),
        Args, OpBundles);
  }
  if (auto AK = TLI->getExtAttrForI32Param(false))
    Call->addParamAttr(2, AK);
  Ind->replaceAllUsesWith(Call);
  Ind->eraseFromParent();
}

void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  GlobalVariable *Counters = getOrCreateRegionCounters(Inc);

  IRBuilder<> Builder(Inc);
  uint64_t Index = Inc->getIndex()->getZExtValue();
  Value *Addr = Builder.CreateConstInBoundsGEP2_32(Counters->getValueType(),
                                                   Counters, 0, Index);

  if (isRuntimeCounterRelocationEnabled()) {
    Type *Int64Ty = Type::getInt64Ty(M->getContext());
    Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
    Function *Fn = Inc->getParent()->getParent();
    Instruction &I = Fn->getEntryBlock().front();
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI) {
      IRBuilder<> Builder(&I);
      GlobalVariable *Bias =
          M->getGlobalVariable(getInstrProfCounterBiasVarName());
      if (!Bias) {
        // The compiler must define this variable when runtime counter
        // relocation is being used. The runtime has a weak external reference
        // that is used to check whether that's the case or not.
        Bias = new GlobalVariable(
            *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
            Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName());
        Bias->setVisibility(GlobalVariable::HiddenVisibility);
        // A definition that's weak (linkonce_odr) without being in a COMDAT
        // section wouldn't lead to link errors, but it would lead to a dead
        // data word from every TU but one. Putting it in COMDAT ensures there
        // will be exactly one data slot in the link.
        if (TT.supportsCOMDAT())
          Bias->setComdat(M->getOrInsertComdat(Bias->getName()));
      }
      LI = Builder.CreateLoad(Int64Ty, Bias);
    }
    auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
    Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
  }

  if (Options.Atomic || AtomicCounterUpdateAll ||
      (Index == 0 && AtomicFirstCounter)) {
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            MaybeAlign(), AtomicOrdering::Monotonic);
  } else {
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}

void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
  ConstantArray *Names =
      cast<ConstantArray>(CoverageNamesVar->getInitializer());
  for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
    Constant *NC = Names->getOperand(I);
    Value *V = NC->stripPointerCasts();
    assert(isa<GlobalVariable>(V) && "Missing reference to function name");
    GlobalVariable *Name = cast<GlobalVariable>(V);

    Name->setLinkage(GlobalValue::PrivateLinkage);
    ReferencedNames.push_back(Name);
    NC->dropAllReferences();
  }
  CoverageNamesVar->eraseFromParent();
}

/// Get the name of a profiling variable for a particular function.
static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix,
                              bool &Renamed) {
  StringRef NamePrefix = getInstrProfNameVarPrefix();
  StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
  Function *F = Inc->getParent()->getParent();
  Module *M = F->getParent();
  if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
      !canRenameComdatFunc(*F)) {
    Renamed = false;
    return (Prefix + Name).str();
  }
  Renamed = true;
  uint64_t FuncHash = Inc->getHash()->getZExtValue();
  SmallVector<char, 24> HashPostfix;
  if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
    return (Prefix + Name).str();
  return (Prefix + Name + "." + Twine(FuncHash)).str();
}

static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) {
  auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag));
  if (!MD)
    return 0;

  // If the flag is a ConstantAsMetadata, it should be an integer representable
  // in 64-bits.
  return cast<ConstantInt>(MD->getValue())->getZExtValue();
}

static bool enablesValueProfiling(const Module &M) {
  return isIRPGOFlagSet(&M) ||
         getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0;
}

// Conservatively returns true if data variables may be referenced by code.
static bool profDataReferencedByCode(const Module &M) {
  return enablesValueProfiling(M);
}

static inline bool shouldRecordFunctionAddr(Function *F) {
  // Only record function addresses if IR PGO is enabled or if clang value
  // profiling is enabled. Recording function addresses greatly increases object
  // file size, because it prevents the inliner from deleting functions that
  // have been inlined everywhere.
  if (!profDataReferencedByCode(*F->getParent()))
    return false;

  // Check the linkage
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // COMDAT. This avoids the profile data variable referencing internal symbols
  // in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkOnceODR linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where the vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with missing address may be picked by the linker, leading to missing
  // indirect call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}

static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin. compiler-rt uses linker magic.
  if (TT.isOSDarwin())
    return false;
  // Use linker script magic to get data/cnts/name start/end.
  if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
      TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() || TT.isOSWindows())
    return false;

  return true;
}

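// Creates (or returns the cached) per-function profiling globals for the
// function named by the increment's name operand. Roughly speaking this is
// the counter array (__profc_*, placed in the counters section, e.g.
// __llvm_prf_cnts on ELF), the data record (__profd_* in __llvm_prf_data),
// and, when value profiling sites are present and statically allocated, the
// value pointer array (__profvp_*). Other object formats use their own
// section spellings.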
GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  auto &PD = ProfileDataMap[NamePtr];
  if (PD.RegionCounters)
    return PD.RegionCounters;

  // Match the linkage and visibility of the name global.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();

  // Use internal rather than private linkage so the counter variable shows up
  // in the symbol table when using debug info for correlation.
  if (DebugInfoCorrelate && TT.isOSBinFormatMachO() &&
      Linkage == GlobalValue::PrivateLinkage)
    Linkage = GlobalValue::InternalLinkage;

  // Due to a limitation of the binder as of 2021/09/28, duplicate weak
  // symbols in the same csect won't be discarded. When there are duplicate
  // weak symbols, we can NOT guarantee that the relocations get resolved to
  // the intended weak symbol, so we cannot ensure the correctness of the
  // relative CounterPtr, so we have to use private linkage for counter and
  // data symbols.
  if (TT.isOSBinFormatXCOFF()) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of counters of the COMDAT function will be emitted after linking. Keep
  // in mind that this pass may run before the inliner, so we need to create a
  // new comdat group for the counters and profiling data. If we use the comdat
  // of the parent function, that will result in relocations against discarded
  // sections.
  //
  // If the data variable is referenced by code, counters and data have to be
  // in different comdats for COFF because the Visual C++ linker will report
  // duplicate symbol errors if there are multiple external symbols with the
  // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
  //
  // For ELF, when not using COMDAT, put counters, data and values into a
  // nodeduplicate COMDAT which is lowered to a zero-flag section group. This
  // allows -z start-stop-gc to discard the entire group when the function is
  // discarded.
  bool DataReferencedByCode = profDataReferencedByCode(*M);
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  bool Renamed;
  std::string CntsVarName =
      getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed);
  std::string DataVarName =
      getVarName(Inc, getInstrProfDataVarPrefix(), Renamed);
  auto MaybeSetComdat = [&](GlobalVariable *GV) {
    bool UseComdat = (NeedComdat || TT.isOSBinFormatELF());
    if (UseComdat) {
      StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode
                                ? GV->getName()
                                : CntsVarName;
      Comdat *C = M->getOrInsertComdat(GroupName);
      if (!NeedComdat)
        C->setSelectionKind(Comdat::NoDeduplicate);
      GV->setComdat(C);
    }
  };

  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();
  ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);

  // Create the counters variable.
  auto *CounterPtr =
      new GlobalVariable(*M, CounterTy, false, Linkage,
                         Constant::getNullValue(CounterTy), CntsVarName);
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  CounterPtr->setAlignment(Align(8));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);
  PD.RegionCounters = CounterPtr;
  if (DebugInfoCorrelate) {
    if (auto *SP = Fn->getSubprogram()) {
      DIBuilder DB(*M, true, SP->getUnit());
      Metadata *FunctionNameAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::FunctionNameAttributeName),
          MDString::get(Ctx, getPGOFuncNameVarInitializer(NamePtr)),
      };
      Metadata *CFGHashAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::CFGHashAttributeName),
          ConstantAsMetadata::get(Inc->getHash()),
      };
      Metadata *NumCountersAnnotation[] = {
          MDString::get(Ctx, InstrProfCorrelator::NumCountersAttributeName),
          ConstantAsMetadata::get(Inc->getNumCounters()),
      };
      auto Annotations = DB.getOrCreateArray({
          MDNode::get(Ctx, FunctionNameAnnotation),
          MDNode::get(Ctx, CFGHashAnnotation),
          MDNode::get(Ctx, NumCountersAnnotation),
      });
      auto *DICounter = DB.createGlobalVariableExpression(
          SP, CounterPtr->getName(), /*LinkageName=*/StringRef(), SP->getFile(),
          /*LineNo=*/0, DB.createUnspecifiedType("Profile Data Type"),
          CounterPtr->hasLocalLinkage(), /*IsDefined=*/true, /*Expr=*/nullptr,
          /*Decl=*/nullptr, /*TemplateParams=*/nullptr, /*AlignInBits=*/0,
          Annotations);
      CounterPtr->addDebugInfo(DICounter);
      DB.finalize();
    } else {
      std::string Msg = ("Missing debug info for function " + Fn->getName() +
                         "; required for profile correlation.")
                            .str();
      Ctx.diagnose(
          DiagnosticInfoPGOProfile(M->getName().data(), Msg, DS_Warning));
    }
  }

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Statically allocate the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  uint64_t NS = 0;
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    NS += PD.NumValueSites[Kind];
  if (NS > 0 && ValueProfileStaticAlloc &&
      !needsRuntimeRegistrationOfSectionRange(TT)) {
    ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);
    auto *ValuesVar = new GlobalVariable(
        *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy),
        getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed));
    ValuesVar->setVisibility(Visibility);
    ValuesVar->setSection(
        getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
    ValuesVar->setAlignment(Align(8));
    MaybeSetComdat(ValuesVar);
    ValuesPtrExpr =
        ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
  }

  if (DebugInfoCorrelate)
    return PD.RegionCounters;

  // Create data variable.
  auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext());
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  // If the data variable is not referenced by code (if we don't emit
  // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the
  // data variable live under linker GC, the data variable can be private. This
  // optimization applies to ELF.
  //
  // On COFF, a comdat leader cannot be local so we require DataReferencedByCode
  // to be false.
  //
  // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees
  // that other copies must have the same CFG and cannot have value profiling.
  // If no hash suffix, other profd copies may be referenced by code.
  if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) &&
      (TT.isOSBinFormatELF() ||
       (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) {
    Linkage = GlobalValue::PrivateLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }
  auto *Data =
      new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName);
  // Reference the counter variable with a label difference (link-time
  // constant).
  auto *RelativeCounterPtr =
      ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy),
                           ConstantExpr::getPtrToInt(Data, IntPtrTy));

  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  Data->setInitializer(ConstantStruct::get(DataTy, DataVals));

  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  PD.DataVar = Data;

  // Mark the data variable as used so that it isn't stripped out.
  CompilerUsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset the Name variable's linkage and visibility to private so
  // that it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return PD.RegionCounters;
}

void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
  // Heuristic for small programs with very few total value sites.
  // The default value of vp-counters-per-site is chosen based on
  // the observation that large apps usually have a low percentage
  // of value sites that actually have any profile data, and thus
  // the average number of counters per site is low. For small
  // apps with very few sites, this may not be true. Bump up the
  // number of counters in this case.
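  // A hand-worked example of the bump below: with 3 total sites and the
  // default of 1.0 counters per site, NumCounters starts at 3; since 3 is
  // below the minimum of 10, it becomes std::max(10, 3 * 2) = 10.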
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  // VNodesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
  UsedVars.push_back(VNodesVar);
}

void InstrProfiling::emitNameData() {
  std::string UncompressedData;

  if (ReferencedNames.empty())
    return;

  std::string CompressedNameStr;
  if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
                                          DoInstrProfNameCompression)) {
    report_fatal_error(Twine(toString(std::move(E))), false);
  }

  auto &Ctx = M->getContext();
  auto *NamesVal =
      ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false);
  NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
                                GlobalValue::PrivateLinkage, NamesVal,
                                getInstrProfNamesVarName());
  NamesSize = CompressedNameStr.size();
  NamesVar->setSection(
      getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
  // On COFF, it's important to reduce the alignment down to 1 to prevent the
  // linker from inserting padding before the start of the names section or
  // between names entries.
  NamesVar->setAlignment(Align(1));
  // NamesVar is used by runtime but not referenced via relocation by other
  // sections. Conservatively make it linker retained.
  UsedVars.push_back(NamesVar);

  for (auto *NamePtr : ReferencedNames)
    NamePtr->eraseFromParent();
}

void InstrProfiling::emitRegistration() {
  if (!needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Construct the function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
  auto *Int64Ty = Type::getInt64Ty(M->getContext());
  auto *RegisterFTy = FunctionType::get(VoidTy, false);
  auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
                                     getInstrProfRegFuncsName(), M);
  RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  if (Options.NoRedZone)
    RegisterF->addFnAttr(Attribute::NoRedZone);

  auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
  auto *RuntimeRegisterF =
      Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
                       getInstrProfRegFuncName(), M);

  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
  for (Value *Data : CompilerUsedVars)
    if (!isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));
  for (Value *Data : UsedVars)
    if (Data != NamesVar && !isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));

  if (NamesVar) {
    Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
    auto *NamesRegisterTy =
        FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
    auto *NamesRegisterF =
        Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage,
                         getInstrProfNamesRegFuncName(), M);
    IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy),
                                    IRB.getInt64(NamesSize)});
  }

  IRB.CreateRetVoid();
}

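// Note: the hook variable emitted below is named by
// getInstrProfRuntimeHookVarName() (__llvm_profile_runtime); roughly speaking,
// a reference to it from instrumented objects is what pulls the profiling
// runtime's initialization object into the link on platforms where the driver
// does not pass -u<hook_var> explicitly.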
bool InstrProfiling::emitRuntimeHook() {
  // We expect the linker to be invoked with the -u<hook_var> flag for Linux,
  // in which case there is no need to emit the external variable.
  if (TT.isOSLinux())
    return false;

  // If the module's provided its own runtime, we don't need to do anything.
  if (M->getGlobalVariable(getInstrProfRuntimeHookVarName()))
    return false;

  // Declare an external variable that will pull in the runtime initialization.
  auto *Int32Ty = Type::getInt32Ty(M->getContext());
  auto *Var =
      new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage,
                         nullptr, getInstrProfRuntimeHookVarName());

  if (TT.isOSBinFormatELF()) {
    // Mark the user variable as used so that it isn't stripped out.
    CompilerUsedVars.push_back(Var);
  } else {
    // Make a function that uses it.
    auto *User = Function::Create(FunctionType::get(Int32Ty, false),
                                  GlobalValue::LinkOnceODRLinkage,
                                  getInstrProfRuntimeHookVarUseFuncName(), M);
    User->addFnAttr(Attribute::NoInline);
    if (Options.NoRedZone)
      User->addFnAttr(Attribute::NoRedZone);
    User->setVisibility(GlobalValue::HiddenVisibility);
    if (TT.supportsCOMDAT())
      User->setComdat(M->getOrInsertComdat(User->getName()));

    IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User));
    auto *Load = IRB.CreateLoad(Int32Ty, Var);
    IRB.CreateRet(Load);

    // Mark the function as used so that it isn't stripped out.
    CompilerUsedVars.push_back(User);
  }
  return true;
}

void InstrProfiling::emitUses() {
  // The metadata sections are parallel arrays. Optimizers (e.g.
  // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so
  // we conservatively retain all unconditionally in the compiler.
  //
  // On ELF and Mach-O, the linker can guarantee the associated sections will be
  // retained or discarded as a unit, so llvm.compiler.used is sufficient.
  // Similarly on COFF, if prof data is not referenced by code we use one comdat
  // and ensure this GC property as well. Otherwise, we have to conservatively
  // make all of the sections retained by the linker.
  if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() ||
      (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M)))
    appendToCompilerUsed(*M, CompilerUsedVars);
  else
    appendToUsed(*M, CompilerUsedVars);

  // We do not add proper references from used metadata sections to NamesVar and
  // VNodesVar, so we have to be conservative and place them in llvm.used
  // regardless of the target.
  appendToUsed(*M, UsedVars);
}

void InstrProfiling::emitInitialization() {
  // Create the ProfileFileName variable. Don't do this for the
  // context-sensitive instrumentation lowering: this lowering runs after
  // LTO/ThinLTO linking. The PGOInstrumentationGenCreateVar pass should
  // have already created the variable before LTO/ThinLTO linking.
  if (!IsCS)
    createProfileFileNameVar(*M, Options.InstrProfileOutput);
  Function *RegisterF = M->getFunction(getInstrProfRegFuncsName());
  if (!RegisterF)
    return;

  // Create the initialization function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *F = Function::Create(FunctionType::get(VoidTy, false),
                             GlobalValue::InternalLinkage,
                             getInstrProfInitFuncName(), M);
  F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  F->addFnAttr(Attribute::NoInline);
  if (Options.NoRedZone)
    F->addFnAttr(Attribute::NoRedZone);

  // Add the basic block and the necessary calls.
  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F));
  IRB.CreateCall(RegisterF, {});
  IRB.CreateRetVoid();

  appendToGlobalCtors(*M, F, 0);
}