1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling. 10 // It also builds the data structures and initialization code needed for 11 // updating execution counts and emitting the profile at runtime. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h" 16 #include "llvm/ADT/ArrayRef.h" 17 #include "llvm/ADT/SmallVector.h" 18 #include "llvm/ADT/StringRef.h" 19 #include "llvm/ADT/Triple.h" 20 #include "llvm/ADT/Twine.h" 21 #include "llvm/Analysis/BlockFrequencyInfo.h" 22 #include "llvm/Analysis/BranchProbabilityInfo.h" 23 #include "llvm/Analysis/LoopInfo.h" 24 #include "llvm/Analysis/TargetLibraryInfo.h" 25 #include "llvm/IR/Attributes.h" 26 #include "llvm/IR/BasicBlock.h" 27 #include "llvm/IR/Constant.h" 28 #include "llvm/IR/Constants.h" 29 #include "llvm/IR/DerivedTypes.h" 30 #include "llvm/IR/Dominators.h" 31 #include "llvm/IR/Function.h" 32 #include "llvm/IR/GlobalValue.h" 33 #include "llvm/IR/GlobalVariable.h" 34 #include "llvm/IR/IRBuilder.h" 35 #include "llvm/IR/Instruction.h" 36 #include "llvm/IR/Instructions.h" 37 #include "llvm/IR/IntrinsicInst.h" 38 #include "llvm/IR/Module.h" 39 #include "llvm/IR/Type.h" 40 #include "llvm/InitializePasses.h" 41 #include "llvm/Pass.h" 42 #include "llvm/ProfileData/InstrProf.h" 43 #include "llvm/Support/Casting.h" 44 #include "llvm/Support/CommandLine.h" 45 #include "llvm/Support/Error.h" 46 #include "llvm/Support/ErrorHandling.h" 47 #include "llvm/Transforms/Utils/BasicBlockUtils.h" 48 #include "llvm/Transforms/Utils/ModuleUtils.h" 49 #include "llvm/Transforms/Utils/SSAUpdater.h" 50 #include <algorithm> 51 #include <cassert> 52 #include <cstddef> 53 #include <cstdint> 54 #include <string> 55 56 using namespace llvm; 57 58 #define DEBUG_TYPE "instrprof" 59 60 namespace { 61 62 cl::opt<bool> DoHashBasedCounterSplit( 63 "hash-based-counter-split", 64 cl::desc("Rename counter variable of a comdat function based on cfg hash"), 65 cl::init(true)); 66 67 cl::opt<bool> 68 RuntimeCounterRelocation("runtime-counter-relocation", 69 cl::desc("Enable relocating counters at runtime."), 70 cl::init(false)); 71 72 cl::opt<bool> ValueProfileStaticAlloc( 73 "vp-static-alloc", 74 cl::desc("Do static counter allocation for value profiler"), 75 cl::init(true)); 76 77 cl::opt<double> NumCountersPerValueSite( 78 "vp-counters-per-site", 79 cl::desc("The average number of profile counters allocated " 80 "per value profiling site."), 81 // This is set to a very small value because in real programs, only 82 // a very small percentage of value sites have non-zero targets, e.g, 1/30. 83 // For those sites with non-zero profile, the average number of targets 84 // is usually smaller than 2. 85 cl::init(1.0)); 86 87 cl::opt<bool> AtomicCounterUpdateAll( 88 "instrprof-atomic-counter-update-all", cl::ZeroOrMore, 89 cl::desc("Make all profile counter updates atomic (for testing only)"), 90 cl::init(false)); 91 92 cl::opt<bool> AtomicCounterUpdatePromoted( 93 "atomic-counter-update-promoted", cl::ZeroOrMore, 94 cl::desc("Do counter update using atomic fetch add " 95 " for promoted counters only"), 96 cl::init(false)); 97 98 cl::opt<bool> AtomicFirstCounter( 99 "atomic-first-counter", cl::ZeroOrMore, 100 cl::desc("Use atomic fetch add for first counter in a function (usually " 101 "the entry counter)"), 102 cl::init(false)); 103 104 // If the option is not specified, the default behavior about whether 105 // counter promotion is done depends on how instrumentaiton lowering 106 // pipeline is setup, i.e., the default value of true of this option 107 // does not mean the promotion will be done by default. Explicitly 108 // setting this option can override the default behavior. 109 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore, 110 cl::desc("Do counter register promotion"), 111 cl::init(false)); 112 cl::opt<unsigned> MaxNumOfPromotionsPerLoop( 113 cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20), 114 cl::desc("Max number counter promotions per loop to avoid" 115 " increasing register pressure too much")); 116 117 // A debug option 118 cl::opt<int> 119 MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1), 120 cl::desc("Max number of allowed counter promotions")); 121 122 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting( 123 cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3), 124 cl::desc("The max number of exiting blocks of a loop to allow " 125 " speculative counter promotion")); 126 127 cl::opt<bool> SpeculativeCounterPromotionToLoop( 128 cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false), 129 cl::desc("When the option is false, if the target block is in a loop, " 130 "the promotion will be disallowed unless the promoted counter " 131 " update can be further/iteratively promoted into an acyclic " 132 " region.")); 133 134 cl::opt<bool> IterativeCounterPromotion( 135 cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true), 136 cl::desc("Allow counter promotion across the whole loop nest.")); 137 138 cl::opt<bool> SkipRetExitBlock( 139 cl::ZeroOrMore, "skip-ret-exit-block", cl::init(true), 140 cl::desc("Suppress counter promotion if exit blocks contain ret.")); 141 142 class InstrProfilingLegacyPass : public ModulePass { 143 InstrProfiling InstrProf; 144 145 public: 146 static char ID; 147 148 InstrProfilingLegacyPass() : ModulePass(ID) {} 149 InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false) 150 : ModulePass(ID), InstrProf(Options, IsCS) { 151 initializeInstrProfilingLegacyPassPass(*PassRegistry::getPassRegistry()); 152 } 153 154 StringRef getPassName() const override { 155 return "Frontend instrumentation-based coverage lowering"; 156 } 157 158 bool runOnModule(Module &M) override { 159 auto GetTLI = [this](Function &F) -> TargetLibraryInfo & { 160 return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F); 161 }; 162 return InstrProf.run(M, GetTLI); 163 } 164 165 void getAnalysisUsage(AnalysisUsage &AU) const override { 166 AU.setPreservesCFG(); 167 AU.addRequired<TargetLibraryInfoWrapperPass>(); 168 } 169 }; 170 171 /// 172 /// A helper class to promote one counter RMW operation in the loop 173 /// into register update. 174 /// 175 /// RWM update for the counter will be sinked out of the loop after 176 /// the transformation. 177 /// 178 class PGOCounterPromoterHelper : public LoadAndStorePromoter { 179 public: 180 PGOCounterPromoterHelper( 181 Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init, 182 BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks, 183 ArrayRef<Instruction *> InsertPts, 184 DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands, 185 LoopInfo &LI) 186 : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks), 187 InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) { 188 assert(isa<LoadInst>(L)); 189 assert(isa<StoreInst>(S)); 190 SSA.AddAvailableValue(PH, Init); 191 } 192 193 void doExtraRewritesBeforeFinalDeletion() override { 194 for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) { 195 BasicBlock *ExitBlock = ExitBlocks[i]; 196 Instruction *InsertPos = InsertPts[i]; 197 // Get LiveIn value into the ExitBlock. If there are multiple 198 // predecessors, the value is defined by a PHI node in this 199 // block. 200 Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock); 201 Value *Addr = cast<StoreInst>(Store)->getPointerOperand(); 202 Type *Ty = LiveInValue->getType(); 203 IRBuilder<> Builder(InsertPos); 204 if (AtomicCounterUpdatePromoted) 205 // automic update currently can only be promoted across the current 206 // loop, not the whole loop nest. 207 Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue, 208 MaybeAlign(), 209 AtomicOrdering::SequentiallyConsistent); 210 else { 211 LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted"); 212 auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue); 213 auto *NewStore = Builder.CreateStore(NewVal, Addr); 214 215 // Now update the parent loop's candidate list: 216 if (IterativeCounterPromotion) { 217 auto *TargetLoop = LI.getLoopFor(ExitBlock); 218 if (TargetLoop) 219 LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore); 220 } 221 } 222 } 223 } 224 225 private: 226 Instruction *Store; 227 ArrayRef<BasicBlock *> ExitBlocks; 228 ArrayRef<Instruction *> InsertPts; 229 DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates; 230 LoopInfo &LI; 231 }; 232 233 /// A helper class to do register promotion for all profile counter 234 /// updates in a loop. 235 /// 236 class PGOCounterPromoter { 237 public: 238 PGOCounterPromoter( 239 DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands, 240 Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI) 241 : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop), 242 LI(LI), BFI(BFI) { 243 244 // Skip collection of ExitBlocks and InsertPts for loops that will not be 245 // able to have counters promoted. 246 SmallVector<BasicBlock *, 8> LoopExitBlocks; 247 SmallPtrSet<BasicBlock *, 8> BlockSet; 248 249 L.getExitBlocks(LoopExitBlocks); 250 if (!isPromotionPossible(&L, LoopExitBlocks)) 251 return; 252 253 for (BasicBlock *ExitBlock : LoopExitBlocks) { 254 if (BlockSet.insert(ExitBlock).second) { 255 ExitBlocks.push_back(ExitBlock); 256 InsertPts.push_back(&*ExitBlock->getFirstInsertionPt()); 257 } 258 } 259 } 260 261 bool run(int64_t *NumPromoted) { 262 // Skip 'infinite' loops: 263 if (ExitBlocks.size() == 0) 264 return false; 265 266 // Skip if any of the ExitBlocks contains a ret instruction. 267 // This is to prevent dumping of incomplete profile -- if the 268 // the loop is a long running loop and dump is called in the middle 269 // of the loop, the result profile is incomplete. 270 // FIXME: add other heuristics to detect long running loops. 271 if (SkipRetExitBlock) { 272 for (auto BB : ExitBlocks) 273 if (isa<ReturnInst>(BB->getTerminator())) 274 return false; 275 } 276 277 unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L); 278 if (MaxProm == 0) 279 return false; 280 281 unsigned Promoted = 0; 282 for (auto &Cand : LoopToCandidates[&L]) { 283 284 SmallVector<PHINode *, 4> NewPHIs; 285 SSAUpdater SSA(&NewPHIs); 286 Value *InitVal = ConstantInt::get(Cand.first->getType(), 0); 287 288 // If BFI is set, we will use it to guide the promotions. 289 if (BFI) { 290 auto *BB = Cand.first->getParent(); 291 auto InstrCount = BFI->getBlockProfileCount(BB); 292 if (!InstrCount) 293 continue; 294 auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader()); 295 // If the average loop trip count is not greater than 1.5, we skip 296 // promotion. 297 if (PreheaderCount && 298 (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2)) 299 continue; 300 } 301 302 PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal, 303 L.getLoopPreheader(), ExitBlocks, 304 InsertPts, LoopToCandidates, LI); 305 Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second})); 306 Promoted++; 307 if (Promoted >= MaxProm) 308 break; 309 310 (*NumPromoted)++; 311 if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions) 312 break; 313 } 314 315 LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth=" 316 << L.getLoopDepth() << ")\n"); 317 return Promoted != 0; 318 } 319 320 private: 321 bool allowSpeculativeCounterPromotion(Loop *LP) { 322 SmallVector<BasicBlock *, 8> ExitingBlocks; 323 L.getExitingBlocks(ExitingBlocks); 324 // Not considierered speculative. 325 if (ExitingBlocks.size() == 1) 326 return true; 327 if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting) 328 return false; 329 return true; 330 } 331 332 // Check whether the loop satisfies the basic conditions needed to perform 333 // Counter Promotions. 334 bool 335 isPromotionPossible(Loop *LP, 336 const SmallVectorImpl<BasicBlock *> &LoopExitBlocks) { 337 // We can't insert into a catchswitch. 338 if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) { 339 return isa<CatchSwitchInst>(Exit->getTerminator()); 340 })) 341 return false; 342 343 if (!LP->hasDedicatedExits()) 344 return false; 345 346 BasicBlock *PH = LP->getLoopPreheader(); 347 if (!PH) 348 return false; 349 350 return true; 351 } 352 353 // Returns the max number of Counter Promotions for LP. 354 unsigned getMaxNumOfPromotionsInLoop(Loop *LP) { 355 SmallVector<BasicBlock *, 8> LoopExitBlocks; 356 LP->getExitBlocks(LoopExitBlocks); 357 if (!isPromotionPossible(LP, LoopExitBlocks)) 358 return 0; 359 360 SmallVector<BasicBlock *, 8> ExitingBlocks; 361 LP->getExitingBlocks(ExitingBlocks); 362 363 // If BFI is set, we do more aggressive promotions based on BFI. 364 if (BFI) 365 return (unsigned)-1; 366 367 // Not considierered speculative. 368 if (ExitingBlocks.size() == 1) 369 return MaxNumOfPromotionsPerLoop; 370 371 if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting) 372 return 0; 373 374 // Whether the target block is in a loop does not matter: 375 if (SpeculativeCounterPromotionToLoop) 376 return MaxNumOfPromotionsPerLoop; 377 378 // Now check the target block: 379 unsigned MaxProm = MaxNumOfPromotionsPerLoop; 380 for (auto *TargetBlock : LoopExitBlocks) { 381 auto *TargetLoop = LI.getLoopFor(TargetBlock); 382 if (!TargetLoop) 383 continue; 384 unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop); 385 unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size(); 386 MaxProm = 387 std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) - 388 PendingCandsInTarget); 389 } 390 return MaxProm; 391 } 392 393 DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates; 394 SmallVector<BasicBlock *, 8> ExitBlocks; 395 SmallVector<Instruction *, 8> InsertPts; 396 Loop &L; 397 LoopInfo &LI; 398 BlockFrequencyInfo *BFI; 399 }; 400 401 enum class ValueProfilingCallType { 402 // Individual values are tracked. Currently used for indiret call target 403 // profiling. 404 Default, 405 406 // MemOp: the memop size value profiling. 407 MemOp 408 }; 409 410 } // end anonymous namespace 411 412 PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) { 413 FunctionAnalysisManager &FAM = 414 AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager(); 415 auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & { 416 return FAM.getResult<TargetLibraryAnalysis>(F); 417 }; 418 if (!run(M, GetTLI)) 419 return PreservedAnalyses::all(); 420 421 return PreservedAnalyses::none(); 422 } 423 424 char InstrProfilingLegacyPass::ID = 0; 425 INITIALIZE_PASS_BEGIN(InstrProfilingLegacyPass, "instrprof", 426 "Frontend instrumentation-based coverage lowering.", 427 false, false) 428 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass) 429 INITIALIZE_PASS_END(InstrProfilingLegacyPass, "instrprof", 430 "Frontend instrumentation-based coverage lowering.", false, 431 false) 432 433 ModulePass * 434 llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options, 435 bool IsCS) { 436 return new InstrProfilingLegacyPass(Options, IsCS); 437 } 438 439 static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) { 440 InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr); 441 if (Inc) 442 return Inc; 443 return dyn_cast<InstrProfIncrementInst>(Instr); 444 } 445 446 bool InstrProfiling::lowerIntrinsics(Function *F) { 447 bool MadeChange = false; 448 PromotionCandidates.clear(); 449 for (BasicBlock &BB : *F) { 450 for (Instruction &Instr : llvm::make_early_inc_range(BB)) { 451 InstrProfIncrementInst *Inc = castToIncrementInst(&Instr); 452 if (Inc) { 453 lowerIncrement(Inc); 454 MadeChange = true; 455 } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(&Instr)) { 456 lowerValueProfileInst(Ind); 457 MadeChange = true; 458 } 459 } 460 } 461 462 if (!MadeChange) 463 return false; 464 465 promoteCounterLoadStores(F); 466 return true; 467 } 468 469 bool InstrProfiling::isRuntimeCounterRelocationEnabled() const { 470 // Mach-O don't support weak external references. 471 if (TT.isOSBinFormatMachO()) 472 return false; 473 474 if (RuntimeCounterRelocation.getNumOccurrences() > 0) 475 return RuntimeCounterRelocation; 476 477 // Fuchsia uses runtime counter relocation by default. 478 return TT.isOSFuchsia(); 479 } 480 481 bool InstrProfiling::isCounterPromotionEnabled() const { 482 if (DoCounterPromotion.getNumOccurrences() > 0) 483 return DoCounterPromotion; 484 485 return Options.DoCounterPromotion; 486 } 487 488 void InstrProfiling::promoteCounterLoadStores(Function *F) { 489 if (!isCounterPromotionEnabled()) 490 return; 491 492 DominatorTree DT(*F); 493 LoopInfo LI(DT); 494 DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates; 495 496 std::unique_ptr<BlockFrequencyInfo> BFI; 497 if (Options.UseBFIInPromotion) { 498 std::unique_ptr<BranchProbabilityInfo> BPI; 499 BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F))); 500 BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI)); 501 } 502 503 for (const auto &LoadStore : PromotionCandidates) { 504 auto *CounterLoad = LoadStore.first; 505 auto *CounterStore = LoadStore.second; 506 BasicBlock *BB = CounterLoad->getParent(); 507 Loop *ParentLoop = LI.getLoopFor(BB); 508 if (!ParentLoop) 509 continue; 510 LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore); 511 } 512 513 SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder(); 514 515 // Do a post-order traversal of the loops so that counter updates can be 516 // iteratively hoisted outside the loop nest. 517 for (auto *Loop : llvm::reverse(Loops)) { 518 PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get()); 519 Promoter.run(&TotalCountersPromoted); 520 } 521 } 522 523 static bool needsRuntimeHookUnconditionally(const Triple &TT) { 524 // On Fuchsia, we only need runtime hook if any counters are present. 525 if (TT.isOSFuchsia()) 526 return false; 527 528 return true; 529 } 530 531 /// Check if the module contains uses of any profiling intrinsics. 532 static bool containsProfilingIntrinsics(Module &M) { 533 if (auto *F = M.getFunction( 534 Intrinsic::getName(llvm::Intrinsic::instrprof_increment))) 535 if (!F->use_empty()) 536 return true; 537 if (auto *F = M.getFunction( 538 Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step))) 539 if (!F->use_empty()) 540 return true; 541 if (auto *F = M.getFunction( 542 Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile))) 543 if (!F->use_empty()) 544 return true; 545 return false; 546 } 547 548 bool InstrProfiling::run( 549 Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) { 550 this->M = &M; 551 this->GetTLI = std::move(GetTLI); 552 NamesVar = nullptr; 553 NamesSize = 0; 554 ProfileDataMap.clear(); 555 CompilerUsedVars.clear(); 556 UsedVars.clear(); 557 TT = Triple(M.getTargetTriple()); 558 559 bool MadeChange = false; 560 561 // Emit the runtime hook even if no counters are present. 562 if (needsRuntimeHookUnconditionally(TT)) 563 MadeChange = emitRuntimeHook(); 564 565 // Improve compile time by avoiding linear scans when there is no work. 566 GlobalVariable *CoverageNamesVar = 567 M.getNamedGlobal(getCoverageUnusedNamesVarName()); 568 if (!containsProfilingIntrinsics(M) && !CoverageNamesVar) 569 return MadeChange; 570 571 // We did not know how many value sites there would be inside 572 // the instrumented function. This is counting the number of instrumented 573 // target value sites to enter it as field in the profile data variable. 574 for (Function &F : M) { 575 InstrProfIncrementInst *FirstProfIncInst = nullptr; 576 for (BasicBlock &BB : F) 577 for (auto I = BB.begin(), E = BB.end(); I != E; I++) 578 if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I)) 579 computeNumValueSiteCounts(Ind); 580 else if (FirstProfIncInst == nullptr) 581 FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I); 582 583 // Value profiling intrinsic lowering requires per-function profile data 584 // variable to be created first. 585 if (FirstProfIncInst != nullptr) 586 static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst)); 587 } 588 589 for (Function &F : M) 590 MadeChange |= lowerIntrinsics(&F); 591 592 if (CoverageNamesVar) { 593 lowerCoverageData(CoverageNamesVar); 594 MadeChange = true; 595 } 596 597 if (!MadeChange) 598 return false; 599 600 emitVNodes(); 601 emitNameData(); 602 emitRuntimeHook(); 603 emitRegistration(); 604 emitUses(); 605 emitInitialization(); 606 return true; 607 } 608 609 static FunctionCallee getOrInsertValueProfilingCall( 610 Module &M, const TargetLibraryInfo &TLI, 611 ValueProfilingCallType CallType = ValueProfilingCallType::Default) { 612 LLVMContext &Ctx = M.getContext(); 613 auto *ReturnTy = Type::getVoidTy(M.getContext()); 614 615 AttributeList AL; 616 if (auto AK = TLI.getExtAttrForI32Param(false)) 617 AL = AL.addParamAttribute(M.getContext(), 2, AK); 618 619 assert((CallType == ValueProfilingCallType::Default || 620 CallType == ValueProfilingCallType::MemOp) && 621 "Must be Default or MemOp"); 622 Type *ParamTypes[] = { 623 #define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType 624 #include "llvm/ProfileData/InstrProfData.inc" 625 }; 626 auto *ValueProfilingCallTy = 627 FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false); 628 StringRef FuncName = CallType == ValueProfilingCallType::Default 629 ? getInstrProfValueProfFuncName() 630 : getInstrProfValueProfMemOpFuncName(); 631 return M.getOrInsertFunction(FuncName, ValueProfilingCallTy, AL); 632 } 633 634 void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) { 635 GlobalVariable *Name = Ind->getName(); 636 uint64_t ValueKind = Ind->getValueKind()->getZExtValue(); 637 uint64_t Index = Ind->getIndex()->getZExtValue(); 638 auto It = ProfileDataMap.find(Name); 639 if (It == ProfileDataMap.end()) { 640 PerFunctionProfileData PD; 641 PD.NumValueSites[ValueKind] = Index + 1; 642 ProfileDataMap[Name] = PD; 643 } else if (It->second.NumValueSites[ValueKind] <= Index) 644 It->second.NumValueSites[ValueKind] = Index + 1; 645 } 646 647 void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) { 648 GlobalVariable *Name = Ind->getName(); 649 auto It = ProfileDataMap.find(Name); 650 assert(It != ProfileDataMap.end() && It->second.DataVar && 651 "value profiling detected in function with no counter incerement"); 652 653 GlobalVariable *DataVar = It->second.DataVar; 654 uint64_t ValueKind = Ind->getValueKind()->getZExtValue(); 655 uint64_t Index = Ind->getIndex()->getZExtValue(); 656 for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind) 657 Index += It->second.NumValueSites[Kind]; 658 659 IRBuilder<> Builder(Ind); 660 bool IsMemOpSize = (Ind->getValueKind()->getZExtValue() == 661 llvm::InstrProfValueKind::IPVK_MemOPSize); 662 CallInst *Call = nullptr; 663 auto *TLI = &GetTLI(*Ind->getFunction()); 664 665 // To support value profiling calls within Windows exception handlers, funclet 666 // information contained within operand bundles needs to be copied over to 667 // the library call. This is required for the IR to be processed by the 668 // WinEHPrepare pass. 669 SmallVector<OperandBundleDef, 1> OpBundles; 670 Ind->getOperandBundlesAsDefs(OpBundles); 671 if (!IsMemOpSize) { 672 Value *Args[3] = {Ind->getTargetValue(), 673 Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()), 674 Builder.getInt32(Index)}; 675 Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args, 676 OpBundles); 677 } else { 678 Value *Args[3] = {Ind->getTargetValue(), 679 Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()), 680 Builder.getInt32(Index)}; 681 Call = Builder.CreateCall( 682 getOrInsertValueProfilingCall(*M, *TLI, ValueProfilingCallType::MemOp), 683 Args, OpBundles); 684 } 685 if (auto AK = TLI->getExtAttrForI32Param(false)) 686 Call->addParamAttr(2, AK); 687 Ind->replaceAllUsesWith(Call); 688 Ind->eraseFromParent(); 689 } 690 691 void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) { 692 GlobalVariable *Counters = getOrCreateRegionCounters(Inc); 693 694 IRBuilder<> Builder(Inc); 695 uint64_t Index = Inc->getIndex()->getZExtValue(); 696 Value *Addr = Builder.CreateConstInBoundsGEP2_32(Counters->getValueType(), 697 Counters, 0, Index); 698 699 if (isRuntimeCounterRelocationEnabled()) { 700 Type *Int64Ty = Type::getInt64Ty(M->getContext()); 701 Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext()); 702 Function *Fn = Inc->getParent()->getParent(); 703 Instruction &I = Fn->getEntryBlock().front(); 704 LoadInst *LI = dyn_cast<LoadInst>(&I); 705 if (!LI) { 706 IRBuilder<> Builder(&I); 707 GlobalVariable *Bias = 708 M->getGlobalVariable(getInstrProfCounterBiasVarName()); 709 if (!Bias) { 710 // Compiler must define this variable when runtime counter relocation 711 // is being used. Runtime has a weak external reference that is used 712 // to check whether that's the case or not. 713 Bias = new GlobalVariable( 714 *M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage, 715 Constant::getNullValue(Int64Ty), getInstrProfCounterBiasVarName()); 716 Bias->setVisibility(GlobalVariable::HiddenVisibility); 717 // A definition that's weak (linkonce_odr) without being in a COMDAT 718 // section wouldn't lead to link errors, but it would lead to a dead 719 // data word from every TU but one. Putting it in COMDAT ensures there 720 // will be exactly one data slot in the link. 721 if (TT.supportsCOMDAT()) 722 Bias->setComdat(M->getOrInsertComdat(Bias->getName())); 723 } 724 LI = Builder.CreateLoad(Int64Ty, Bias); 725 } 726 auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI); 727 Addr = Builder.CreateIntToPtr(Add, Int64PtrTy); 728 } 729 730 if (Options.Atomic || AtomicCounterUpdateAll || 731 (Index == 0 && AtomicFirstCounter)) { 732 Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(), 733 MaybeAlign(), AtomicOrdering::Monotonic); 734 } else { 735 Value *IncStep = Inc->getStep(); 736 Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount"); 737 auto *Count = Builder.CreateAdd(Load, Inc->getStep()); 738 auto *Store = Builder.CreateStore(Count, Addr); 739 if (isCounterPromotionEnabled()) 740 PromotionCandidates.emplace_back(cast<Instruction>(Load), Store); 741 } 742 Inc->eraseFromParent(); 743 } 744 745 void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) { 746 ConstantArray *Names = 747 cast<ConstantArray>(CoverageNamesVar->getInitializer()); 748 for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) { 749 Constant *NC = Names->getOperand(I); 750 Value *V = NC->stripPointerCasts(); 751 assert(isa<GlobalVariable>(V) && "Missing reference to function name"); 752 GlobalVariable *Name = cast<GlobalVariable>(V); 753 754 Name->setLinkage(GlobalValue::PrivateLinkage); 755 ReferencedNames.push_back(Name); 756 NC->dropAllReferences(); 757 } 758 CoverageNamesVar->eraseFromParent(); 759 } 760 761 /// Get the name of a profiling variable for a particular function. 762 static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix, 763 bool &Renamed) { 764 StringRef NamePrefix = getInstrProfNameVarPrefix(); 765 StringRef Name = Inc->getName()->getName().substr(NamePrefix.size()); 766 Function *F = Inc->getParent()->getParent(); 767 Module *M = F->getParent(); 768 if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) || 769 !canRenameComdatFunc(*F)) { 770 Renamed = false; 771 return (Prefix + Name).str(); 772 } 773 Renamed = true; 774 uint64_t FuncHash = Inc->getHash()->getZExtValue(); 775 SmallVector<char, 24> HashPostfix; 776 if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix))) 777 return (Prefix + Name).str(); 778 return (Prefix + Name + "." + Twine(FuncHash)).str(); 779 } 780 781 static uint64_t getIntModuleFlagOrZero(const Module &M, StringRef Flag) { 782 auto *MD = dyn_cast_or_null<ConstantAsMetadata>(M.getModuleFlag(Flag)); 783 if (!MD) 784 return 0; 785 786 // If the flag is a ConstantAsMetadata, it should be an integer representable 787 // in 64-bits. 788 return cast<ConstantInt>(MD->getValue())->getZExtValue(); 789 } 790 791 static bool enablesValueProfiling(const Module &M) { 792 return isIRPGOFlagSet(&M) || 793 getIntModuleFlagOrZero(M, "EnableValueProfiling") != 0; 794 } 795 796 // Conservatively returns true if data variables may be referenced by code. 797 static bool profDataReferencedByCode(const Module &M) { 798 return enablesValueProfiling(M); 799 } 800 801 static inline bool shouldRecordFunctionAddr(Function *F) { 802 // Only record function addresses if IR PGO is enabled or if clang value 803 // profiling is enabled. Recording function addresses greatly increases object 804 // file size, because it prevents the inliner from deleting functions that 805 // have been inlined everywhere. 806 if (!profDataReferencedByCode(*F->getParent())) 807 return false; 808 809 // Check the linkage 810 bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage(); 811 if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() && 812 !HasAvailableExternallyLinkage) 813 return true; 814 815 // A function marked 'alwaysinline' with available_externally linkage can't 816 // have its address taken. Doing so would create an undefined external ref to 817 // the function, which would fail to link. 818 if (HasAvailableExternallyLinkage && 819 F->hasFnAttribute(Attribute::AlwaysInline)) 820 return false; 821 822 // Prohibit function address recording if the function is both internal and 823 // COMDAT. This avoids the profile data variable referencing internal symbols 824 // in COMDAT. 825 if (F->hasLocalLinkage() && F->hasComdat()) 826 return false; 827 828 // Check uses of this function for other than direct calls or invokes to it. 829 // Inline virtual functions have linkeOnceODR linkage. When a key method 830 // exists, the vtable will only be emitted in the TU where the key method 831 // is defined. In a TU where vtable is not available, the function won't 832 // be 'addresstaken'. If its address is not recorded here, the profile data 833 // with missing address may be picked by the linker leading to missing 834 // indirect call target info. 835 return F->hasAddressTaken() || F->hasLinkOnceLinkage(); 836 } 837 838 static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) { 839 // Don't do this for Darwin. compiler-rt uses linker magic. 840 if (TT.isOSDarwin()) 841 return false; 842 // Use linker script magic to get data/cnts/name start/end. 843 if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() || 844 TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() || TT.isOSWindows()) 845 return false; 846 847 return true; 848 } 849 850 GlobalVariable * 851 InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) { 852 GlobalVariable *NamePtr = Inc->getName(); 853 auto It = ProfileDataMap.find(NamePtr); 854 PerFunctionProfileData PD; 855 if (It != ProfileDataMap.end()) { 856 if (It->second.RegionCounters) 857 return It->second.RegionCounters; 858 PD = It->second; 859 } 860 861 // Match the linkage and visibility of the name global. 862 Function *Fn = Inc->getParent()->getParent(); 863 GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage(); 864 GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility(); 865 866 // Due to the limitation of binder as of 2021/09/28, the duplicate weak 867 // symbols in the same csect won't be discarded. When there are duplicate weak 868 // symbols, we can NOT guarantee that the relocations get resolved to the 869 // intended weak symbol, so we can not ensure the correctness of the relative 870 // CounterPtr, so we have to use private linkage for counter and data symbols. 871 if (TT.isOSBinFormatXCOFF()) { 872 Linkage = GlobalValue::PrivateLinkage; 873 Visibility = GlobalValue::DefaultVisibility; 874 } 875 // Move the name variable to the right section. Place them in a COMDAT group 876 // if the associated function is a COMDAT. This will make sure that only one 877 // copy of counters of the COMDAT function will be emitted after linking. Keep 878 // in mind that this pass may run before the inliner, so we need to create a 879 // new comdat group for the counters and profiling data. If we use the comdat 880 // of the parent function, that will result in relocations against discarded 881 // sections. 882 // 883 // If the data variable is referenced by code, counters and data have to be 884 // in different comdats for COFF because the Visual C++ linker will report 885 // duplicate symbol errors if there are multiple external symbols with the 886 // same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE. 887 // 888 // For ELF, when not using COMDAT, put counters, data and values into a 889 // nodeduplicate COMDAT which is lowered to a zero-flag section group. This 890 // allows -z start-stop-gc to discard the entire group when the function is 891 // discarded. 892 bool DataReferencedByCode = profDataReferencedByCode(*M); 893 bool NeedComdat = needsComdatForCounter(*Fn, *M); 894 bool Renamed; 895 std::string CntsVarName = 896 getVarName(Inc, getInstrProfCountersVarPrefix(), Renamed); 897 std::string DataVarName = 898 getVarName(Inc, getInstrProfDataVarPrefix(), Renamed); 899 auto MaybeSetComdat = [&](GlobalVariable *GV) { 900 bool UseComdat = (NeedComdat || TT.isOSBinFormatELF()); 901 if (UseComdat) { 902 StringRef GroupName = TT.isOSBinFormatCOFF() && DataReferencedByCode 903 ? GV->getName() 904 : CntsVarName; 905 Comdat *C = M->getOrInsertComdat(GroupName); 906 if (!NeedComdat) 907 C->setSelectionKind(Comdat::NoDeduplicate); 908 GV->setComdat(C); 909 } 910 }; 911 912 uint64_t NumCounters = Inc->getNumCounters()->getZExtValue(); 913 LLVMContext &Ctx = M->getContext(); 914 ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters); 915 916 // Create the counters variable. 917 auto *CounterPtr = 918 new GlobalVariable(*M, CounterTy, false, Linkage, 919 Constant::getNullValue(CounterTy), CntsVarName); 920 CounterPtr->setVisibility(Visibility); 921 CounterPtr->setSection( 922 getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat())); 923 CounterPtr->setAlignment(Align(8)); 924 MaybeSetComdat(CounterPtr); 925 CounterPtr->setLinkage(Linkage); 926 927 auto *Int8PtrTy = Type::getInt8PtrTy(Ctx); 928 // Allocate statically the array of pointers to value profile nodes for 929 // the current function. 930 Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy); 931 uint64_t NS = 0; 932 for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind) 933 NS += PD.NumValueSites[Kind]; 934 if (NS > 0 && ValueProfileStaticAlloc && 935 !needsRuntimeRegistrationOfSectionRange(TT)) { 936 ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS); 937 auto *ValuesVar = new GlobalVariable( 938 *M, ValuesTy, false, Linkage, Constant::getNullValue(ValuesTy), 939 getVarName(Inc, getInstrProfValuesVarPrefix(), Renamed)); 940 ValuesVar->setVisibility(Visibility); 941 ValuesVar->setSection( 942 getInstrProfSectionName(IPSK_vals, TT.getObjectFormat())); 943 ValuesVar->setAlignment(Align(8)); 944 MaybeSetComdat(ValuesVar); 945 ValuesPtrExpr = 946 ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx)); 947 } 948 949 // Create data variable. 950 auto *IntPtrTy = M->getDataLayout().getIntPtrType(M->getContext()); 951 auto *Int16Ty = Type::getInt16Ty(Ctx); 952 auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1); 953 Type *DataTypes[] = { 954 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType, 955 #include "llvm/ProfileData/InstrProfData.inc" 956 }; 957 auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes)); 958 959 Constant *FunctionAddr = shouldRecordFunctionAddr(Fn) 960 ? ConstantExpr::getBitCast(Fn, Int8PtrTy) 961 : ConstantPointerNull::get(Int8PtrTy); 962 963 Constant *Int16ArrayVals[IPVK_Last + 1]; 964 for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind) 965 Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]); 966 967 // If the data variable is not referenced by code (if we don't emit 968 // @llvm.instrprof.value.profile, NS will be 0), and the counter keeps the 969 // data variable live under linker GC, the data variable can be private. This 970 // optimization applies to ELF. 971 // 972 // On COFF, a comdat leader cannot be local so we require DataReferencedByCode 973 // to be false. 974 // 975 // If profd is in a deduplicate comdat, NS==0 with a hash suffix guarantees 976 // that other copies must have the same CFG and cannot have value profiling. 977 // If no hash suffix, other profd copies may be referenced by code. 978 if (NS == 0 && !(DataReferencedByCode && NeedComdat && !Renamed) && 979 (TT.isOSBinFormatELF() || 980 (!DataReferencedByCode && TT.isOSBinFormatCOFF()))) { 981 Linkage = GlobalValue::PrivateLinkage; 982 Visibility = GlobalValue::DefaultVisibility; 983 } 984 auto *Data = 985 new GlobalVariable(*M, DataTy, false, Linkage, nullptr, DataVarName); 986 // Reference the counter variable with a label difference (link-time 987 // constant). 988 auto *RelativeCounterPtr = 989 ConstantExpr::getSub(ConstantExpr::getPtrToInt(CounterPtr, IntPtrTy), 990 ConstantExpr::getPtrToInt(Data, IntPtrTy)); 991 992 Constant *DataVals[] = { 993 #define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init, 994 #include "llvm/ProfileData/InstrProfData.inc" 995 }; 996 Data->setInitializer(ConstantStruct::get(DataTy, DataVals)); 997 998 Data->setVisibility(Visibility); 999 Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat())); 1000 Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT)); 1001 MaybeSetComdat(Data); 1002 Data->setLinkage(Linkage); 1003 1004 PD.RegionCounters = CounterPtr; 1005 PD.DataVar = Data; 1006 ProfileDataMap[NamePtr] = PD; 1007 1008 // Mark the data variable as used so that it isn't stripped out. 1009 CompilerUsedVars.push_back(Data); 1010 // Now that the linkage set by the FE has been passed to the data and counter 1011 // variables, reset Name variable's linkage and visibility to private so that 1012 // it can be removed later by the compiler. 1013 NamePtr->setLinkage(GlobalValue::PrivateLinkage); 1014 // Collect the referenced names to be used by emitNameData. 1015 ReferencedNames.push_back(NamePtr); 1016 1017 return CounterPtr; 1018 } 1019 1020 void InstrProfiling::emitVNodes() { 1021 if (!ValueProfileStaticAlloc) 1022 return; 1023 1024 // For now only support this on platforms that do 1025 // not require runtime registration to discover 1026 // named section start/end. 1027 if (needsRuntimeRegistrationOfSectionRange(TT)) 1028 return; 1029 1030 size_t TotalNS = 0; 1031 for (auto &PD : ProfileDataMap) { 1032 for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind) 1033 TotalNS += PD.second.NumValueSites[Kind]; 1034 } 1035 1036 if (!TotalNS) 1037 return; 1038 1039 uint64_t NumCounters = TotalNS * NumCountersPerValueSite; 1040 // Heuristic for small programs with very few total value sites. 1041 // The default value of vp-counters-per-site is chosen based on 1042 // the observation that large apps usually have a low percentage 1043 // of value sites that actually have any profile data, and thus 1044 // the average number of counters per site is low. For small 1045 // apps with very few sites, this may not be true. Bump up the 1046 // number of counters in this case. 1047 #define INSTR_PROF_MIN_VAL_COUNTS 10 1048 if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS) 1049 NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2); 1050 1051 auto &Ctx = M->getContext(); 1052 Type *VNodeTypes[] = { 1053 #define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType, 1054 #include "llvm/ProfileData/InstrProfData.inc" 1055 }; 1056 auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes)); 1057 1058 ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters); 1059 auto *VNodesVar = new GlobalVariable( 1060 *M, VNodesTy, false, GlobalValue::PrivateLinkage, 1061 Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName()); 1062 VNodesVar->setSection( 1063 getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat())); 1064 // VNodesVar is used by runtime but not referenced via relocation by other 1065 // sections. Conservatively make it linker retained. 1066 UsedVars.push_back(VNodesVar); 1067 } 1068 1069 void InstrProfiling::emitNameData() { 1070 std::string UncompressedData; 1071 1072 if (ReferencedNames.empty()) 1073 return; 1074 1075 std::string CompressedNameStr; 1076 if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr, 1077 DoInstrProfNameCompression)) { 1078 report_fatal_error(Twine(toString(std::move(E))), false); 1079 } 1080 1081 auto &Ctx = M->getContext(); 1082 auto *NamesVal = 1083 ConstantDataArray::getString(Ctx, StringRef(CompressedNameStr), false); 1084 NamesVar = new GlobalVariable(*M, NamesVal->getType(), true, 1085 GlobalValue::PrivateLinkage, NamesVal, 1086 getInstrProfNamesVarName()); 1087 NamesSize = CompressedNameStr.size(); 1088 NamesVar->setSection( 1089 getInstrProfSectionName(IPSK_name, TT.getObjectFormat())); 1090 // On COFF, it's important to reduce the alignment down to 1 to prevent the 1091 // linker from inserting padding before the start of the names section or 1092 // between names entries. 1093 NamesVar->setAlignment(Align(1)); 1094 // NamesVar is used by runtime but not referenced via relocation by other 1095 // sections. Conservatively make it linker retained. 1096 UsedVars.push_back(NamesVar); 1097 1098 for (auto *NamePtr : ReferencedNames) 1099 NamePtr->eraseFromParent(); 1100 } 1101 1102 void InstrProfiling::emitRegistration() { 1103 if (!needsRuntimeRegistrationOfSectionRange(TT)) 1104 return; 1105 1106 // Construct the function. 1107 auto *VoidTy = Type::getVoidTy(M->getContext()); 1108 auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext()); 1109 auto *Int64Ty = Type::getInt64Ty(M->getContext()); 1110 auto *RegisterFTy = FunctionType::get(VoidTy, false); 1111 auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage, 1112 getInstrProfRegFuncsName(), M); 1113 RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global); 1114 if (Options.NoRedZone) 1115 RegisterF->addFnAttr(Attribute::NoRedZone); 1116 1117 auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false); 1118 auto *RuntimeRegisterF = 1119 Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage, 1120 getInstrProfRegFuncName(), M); 1121 1122 IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF)); 1123 for (Value *Data : CompilerUsedVars) 1124 if (!isa<Function>(Data)) 1125 IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy)); 1126 for (Value *Data : UsedVars) 1127 if (Data != NamesVar && !isa<Function>(Data)) 1128 IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy)); 1129 1130 if (NamesVar) { 1131 Type *ParamTypes[] = {VoidPtrTy, Int64Ty}; 1132 auto *NamesRegisterTy = 1133 FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false); 1134 auto *NamesRegisterF = 1135 Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage, 1136 getInstrProfNamesRegFuncName(), M); 1137 IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy), 1138 IRB.getInt64(NamesSize)}); 1139 } 1140 1141 IRB.CreateRetVoid(); 1142 } 1143 1144 bool InstrProfiling::emitRuntimeHook() { 1145 // We expect the linker to be invoked with -u<hook_var> flag for Linux 1146 // in which case there is no need to emit the external variable. 1147 if (TT.isOSLinux()) 1148 return false; 1149 1150 // If the module's provided its own runtime, we don't need to do anything. 1151 if (M->getGlobalVariable(getInstrProfRuntimeHookVarName())) 1152 return false; 1153 1154 // Declare an external variable that will pull in the runtime initialization. 1155 auto *Int32Ty = Type::getInt32Ty(M->getContext()); 1156 auto *Var = 1157 new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage, 1158 nullptr, getInstrProfRuntimeHookVarName()); 1159 1160 if (TT.isOSBinFormatELF()) { 1161 // Mark the user variable as used so that it isn't stripped out. 1162 CompilerUsedVars.push_back(Var); 1163 } else { 1164 // Make a function that uses it. 1165 auto *User = Function::Create(FunctionType::get(Int32Ty, false), 1166 GlobalValue::LinkOnceODRLinkage, 1167 getInstrProfRuntimeHookVarUseFuncName(), M); 1168 User->addFnAttr(Attribute::NoInline); 1169 if (Options.NoRedZone) 1170 User->addFnAttr(Attribute::NoRedZone); 1171 User->setVisibility(GlobalValue::HiddenVisibility); 1172 if (TT.supportsCOMDAT()) 1173 User->setComdat(M->getOrInsertComdat(User->getName())); 1174 1175 IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User)); 1176 auto *Load = IRB.CreateLoad(Int32Ty, Var); 1177 IRB.CreateRet(Load); 1178 1179 // Mark the function as used so that it isn't stripped out. 1180 CompilerUsedVars.push_back(User); 1181 } 1182 return true; 1183 } 1184 1185 void InstrProfiling::emitUses() { 1186 // The metadata sections are parallel arrays. Optimizers (e.g. 1187 // GlobalOpt/ConstantMerge) may not discard associated sections as a unit, so 1188 // we conservatively retain all unconditionally in the compiler. 1189 // 1190 // On ELF and Mach-O, the linker can guarantee the associated sections will be 1191 // retained or discarded as a unit, so llvm.compiler.used is sufficient. 1192 // Similarly on COFF, if prof data is not referenced by code we use one comdat 1193 // and ensure this GC property as well. Otherwise, we have to conservatively 1194 // make all of the sections retained by the linker. 1195 if (TT.isOSBinFormatELF() || TT.isOSBinFormatMachO() || 1196 (TT.isOSBinFormatCOFF() && !profDataReferencedByCode(*M))) 1197 appendToCompilerUsed(*M, CompilerUsedVars); 1198 else 1199 appendToUsed(*M, CompilerUsedVars); 1200 1201 // We do not add proper references from used metadata sections to NamesVar and 1202 // VNodesVar, so we have to be conservative and place them in llvm.used 1203 // regardless of the target, 1204 appendToUsed(*M, UsedVars); 1205 } 1206 1207 void InstrProfiling::emitInitialization() { 1208 // Create ProfileFileName variable. Don't don't this for the 1209 // context-sensitive instrumentation lowering: This lowering is after 1210 // LTO/ThinLTO linking. Pass PGOInstrumentationGenCreateVar should 1211 // have already create the variable before LTO/ThinLTO linking. 1212 if (!IsCS) 1213 createProfileFileNameVar(*M, Options.InstrProfileOutput); 1214 Function *RegisterF = M->getFunction(getInstrProfRegFuncsName()); 1215 if (!RegisterF) 1216 return; 1217 1218 // Create the initialization function. 1219 auto *VoidTy = Type::getVoidTy(M->getContext()); 1220 auto *F = Function::Create(FunctionType::get(VoidTy, false), 1221 GlobalValue::InternalLinkage, 1222 getInstrProfInitFuncName(), M); 1223 F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global); 1224 F->addFnAttr(Attribute::NoInline); 1225 if (Options.NoRedZone) 1226 F->addFnAttr(Attribute::NoRedZone); 1227 1228 // Add the basic block and the necessary calls. 1229 IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F)); 1230 IRB.CreateCall(RegisterF, {}); 1231 IRB.CreateRetVoid(); 1232 1233 appendToGlobalCtors(*M, F, 0); 1234 } 1235