1 //===-- InstrProfiling.cpp - Frontend instrumentation based profiling -----===// 2 // 3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. 4 // See https://llvm.org/LICENSE.txt for license information. 5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception 6 // 7 //===----------------------------------------------------------------------===// 8 // 9 // This pass lowers instrprof_* intrinsics emitted by a frontend for profiling. 10 // It also builds the data structures and initialization code needed for 11 // updating execution counts and emitting the profile at runtime. 12 // 13 //===----------------------------------------------------------------------===// 14 15 #include "llvm/Transforms/Instrumentation/InstrProfiling.h" 16 #include "llvm/ADT/ArrayRef.h" 17 #include "llvm/ADT/SmallVector.h" 18 #include "llvm/ADT/StringRef.h" 19 #include "llvm/ADT/Triple.h" 20 #include "llvm/ADT/Twine.h" 21 #include "llvm/Analysis/BlockFrequencyInfo.h" 22 #include "llvm/Analysis/BranchProbabilityInfo.h" 23 #include "llvm/Analysis/LoopInfo.h" 24 #include "llvm/Analysis/TargetLibraryInfo.h" 25 #include "llvm/IR/Attributes.h" 26 #include "llvm/IR/BasicBlock.h" 27 #include "llvm/IR/Constant.h" 28 #include "llvm/IR/Constants.h" 29 #include "llvm/IR/DerivedTypes.h" 30 #include "llvm/IR/Dominators.h" 31 #include "llvm/IR/Function.h" 32 #include "llvm/IR/GlobalValue.h" 33 #include "llvm/IR/GlobalVariable.h" 34 #include "llvm/IR/IRBuilder.h" 35 #include "llvm/IR/Instruction.h" 36 #include "llvm/IR/Instructions.h" 37 #include "llvm/IR/IntrinsicInst.h" 38 #include "llvm/IR/Module.h" 39 #include "llvm/IR/Type.h" 40 #include "llvm/InitializePasses.h" 41 #include "llvm/Pass.h" 42 #include "llvm/ProfileData/InstrProf.h" 43 #include "llvm/Support/Casting.h" 44 #include "llvm/Support/CommandLine.h" 45 #include "llvm/Support/Error.h" 46 #include "llvm/Support/ErrorHandling.h" 47 #include "llvm/Transforms/Utils/BasicBlockUtils.h" 48 #include 
"llvm/Transforms/Utils/ModuleUtils.h" 49 #include "llvm/Transforms/Utils/SSAUpdater.h" 50 #include <algorithm> 51 #include <cassert> 52 #include <cstddef> 53 #include <cstdint> 54 #include <string> 55 56 using namespace llvm; 57 58 #define DEBUG_TYPE "instrprof" 59 60 // The start and end values of precise value profile range for memory 61 // intrinsic sizes 62 cl::opt<std::string> MemOPSizeRange( 63 "memop-size-range", 64 cl::desc("Set the range of size in memory intrinsic calls to be profiled " 65 "precisely, in a format of <start_val>:<end_val>"), 66 cl::init("")); 67 68 // The value that considered to be large value in memory intrinsic. 69 cl::opt<unsigned> MemOPSizeLarge( 70 "memop-size-large", 71 cl::desc("Set large value thresthold in memory intrinsic size profiling. " 72 "Value of 0 disables the large value profiling."), 73 cl::init(8192)); 74 75 namespace { 76 77 cl::opt<bool> DoNameCompression("enable-name-compression", 78 cl::desc("Enable name string compression"), 79 cl::init(true)); 80 81 cl::opt<bool> DoHashBasedCounterSplit( 82 "hash-based-counter-split", 83 cl::desc("Rename counter variable of a comdat function based on cfg hash"), 84 cl::init(true)); 85 86 cl::opt<bool> RuntimeCounterRelocation( 87 "runtime-counter-relocation", 88 cl::desc("Enable relocating counters at runtime."), 89 cl::init(false)); 90 91 cl::opt<bool> ValueProfileStaticAlloc( 92 "vp-static-alloc", 93 cl::desc("Do static counter allocation for value profiler"), 94 cl::init(true)); 95 96 cl::opt<double> NumCountersPerValueSite( 97 "vp-counters-per-site", 98 cl::desc("The average number of profile counters allocated " 99 "per value profiling site."), 100 // This is set to a very small value because in real programs, only 101 // a very small percentage of value sites have non-zero targets, e.g, 1/30. 102 // For those sites with non-zero profile, the average number of targets 103 // is usually smaller than 2. 
104 cl::init(1.0)); 105 106 cl::opt<bool> AtomicCounterUpdateAll( 107 "instrprof-atomic-counter-update-all", cl::ZeroOrMore, 108 cl::desc("Make all profile counter updates atomic (for testing only)"), 109 cl::init(false)); 110 111 cl::opt<bool> AtomicCounterUpdatePromoted( 112 "atomic-counter-update-promoted", cl::ZeroOrMore, 113 cl::desc("Do counter update using atomic fetch add " 114 " for promoted counters only"), 115 cl::init(false)); 116 117 // If the option is not specified, the default behavior about whether 118 // counter promotion is done depends on how instrumentaiton lowering 119 // pipeline is setup, i.e., the default value of true of this option 120 // does not mean the promotion will be done by default. Explicitly 121 // setting this option can override the default behavior. 122 cl::opt<bool> DoCounterPromotion("do-counter-promotion", cl::ZeroOrMore, 123 cl::desc("Do counter register promotion"), 124 cl::init(false)); 125 cl::opt<unsigned> MaxNumOfPromotionsPerLoop( 126 cl::ZeroOrMore, "max-counter-promotions-per-loop", cl::init(20), 127 cl::desc("Max number counter promotions per loop to avoid" 128 " increasing register pressure too much")); 129 130 // A debug option 131 cl::opt<int> 132 MaxNumOfPromotions(cl::ZeroOrMore, "max-counter-promotions", cl::init(-1), 133 cl::desc("Max number of allowed counter promotions")); 134 135 cl::opt<unsigned> SpeculativeCounterPromotionMaxExiting( 136 cl::ZeroOrMore, "speculative-counter-promotion-max-exiting", cl::init(3), 137 cl::desc("The max number of exiting blocks of a loop to allow " 138 " speculative counter promotion")); 139 140 cl::opt<bool> SpeculativeCounterPromotionToLoop( 141 cl::ZeroOrMore, "speculative-counter-promotion-to-loop", cl::init(false), 142 cl::desc("When the option is false, if the target block is in a loop, " 143 "the promotion will be disallowed unless the promoted counter " 144 " update can be further/iteratively promoted into an acyclic " 145 " region.")); 146 147 cl::opt<bool> 
IterativeCounterPromotion( 148 cl::ZeroOrMore, "iterative-counter-promotion", cl::init(true), 149 cl::desc("Allow counter promotion across the whole loop nest.")); 150 151 class InstrProfilingLegacyPass : public ModulePass { 152 InstrProfiling InstrProf; 153 154 public: 155 static char ID; 156 157 InstrProfilingLegacyPass() : ModulePass(ID) {} 158 InstrProfilingLegacyPass(const InstrProfOptions &Options, bool IsCS = false) 159 : ModulePass(ID), InstrProf(Options, IsCS) {} 160 161 StringRef getPassName() const override { 162 return "Frontend instrumentation-based coverage lowering"; 163 } 164 165 bool runOnModule(Module &M) override { 166 auto GetTLI = [this](Function &F) -> TargetLibraryInfo & { 167 return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F); 168 }; 169 return InstrProf.run(M, GetTLI); 170 } 171 172 void getAnalysisUsage(AnalysisUsage &AU) const override { 173 AU.setPreservesCFG(); 174 AU.addRequired<TargetLibraryInfoWrapperPass>(); 175 } 176 }; 177 178 /// 179 /// A helper class to promote one counter RMW operation in the loop 180 /// into register update. 181 /// 182 /// RWM update for the counter will be sinked out of the loop after 183 /// the transformation. 
///
class PGOCounterPromoterHelper : public LoadAndStorePromoter {
public:
  /// \p L / \p S are the counter load/store being promoted, \p Init the value
  /// available in preheader \p PH. \p ExitBlocks and \p InsertPts are the
  /// (parallel) lists of exit blocks and the positions where the sunk counter
  /// update is materialized. \p LoopToCands is shared mutable state: sunk
  /// updates are appended to the enclosing loop's candidate list so they can
  /// be promoted again iteratively.
  PGOCounterPromoterHelper(
      Instruction *L, Instruction *S, SSAUpdater &SSA, Value *Init,
      BasicBlock *PH, ArrayRef<BasicBlock *> ExitBlocks,
      ArrayRef<Instruction *> InsertPts,
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      LoopInfo &LI)
      : LoadAndStorePromoter({L, S}, SSA), Store(S), ExitBlocks(ExitBlocks),
        InsertPts(InsertPts), LoopToCandidates(LoopToCands), LI(LI) {
    assert(isa<LoadInst>(L));
    assert(isa<StoreInst>(S));
    SSA.AddAvailableValue(PH, Init);
  }

  /// Emit the sunk counter update in each exit block before the promoted
  /// load/store pair is deleted by LoadAndStorePromoter.
  void doExtraRewritesBeforeFinalDeletion() override {
    for (unsigned i = 0, e = ExitBlocks.size(); i != e; ++i) {
      BasicBlock *ExitBlock = ExitBlocks[i];
      Instruction *InsertPos = InsertPts[i];
      // Get LiveIn value into the ExitBlock. If there are multiple
      // predecessors, the value is defined by a PHI node in this
      // block.
      Value *LiveInValue = SSA.GetValueInMiddleOfBlock(ExitBlock);
      Value *Addr = cast<StoreInst>(Store)->getPointerOperand();
      Type *Ty = LiveInValue->getType();
      IRBuilder<> Builder(InsertPos);
      if (AtomicCounterUpdatePromoted)
        // Atomic update currently can only be promoted across the current
        // loop, not the whole loop nest.
        Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, LiveInValue,
                                AtomicOrdering::SequentiallyConsistent);
      else {
        LoadInst *OldVal = Builder.CreateLoad(Ty, Addr, "pgocount.promoted");
        auto *NewVal = Builder.CreateAdd(OldVal, LiveInValue);
        auto *NewStore = Builder.CreateStore(NewVal, Addr);

        // Now update the parent loop's candidate list: the newly sunk
        // load/store can itself be promoted out of the enclosing loop.
        if (IterativeCounterPromotion) {
          auto *TargetLoop = LI.getLoopFor(ExitBlock);
          if (TargetLoop)
            LoopToCandidates[TargetLoop].emplace_back(OldVal, NewStore);
        }
      }
    }
  }

private:
  Instruction *Store;
  ArrayRef<BasicBlock *> ExitBlocks;
  ArrayRef<Instruction *> InsertPts;
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  LoopInfo &LI;
};

/// A helper class to do register promotion for all profile counter
/// updates in a loop.
///
class PGOCounterPromoter {
public:
  PGOCounterPromoter(
      DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCands,
      Loop &CurLoop, LoopInfo &LI, BlockFrequencyInfo *BFI)
      : LoopToCandidates(LoopToCands), ExitBlocks(), InsertPts(), L(CurLoop),
        LI(LI), BFI(BFI) {

    // Collect the unique exit blocks of the loop, and for each one the
    // position where a sunk counter update can be inserted.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    SmallPtrSet<BasicBlock *, 8> BlockSet;
    L.getExitBlocks(LoopExitBlocks);

    for (BasicBlock *ExitBlock : LoopExitBlocks) {
      if (BlockSet.insert(ExitBlock).second) {
        ExitBlocks.push_back(ExitBlock);
        InsertPts.push_back(&*ExitBlock->getFirstInsertionPt());
      }
    }
  }

  /// Promote as many candidate counter updates of loop L as the limits allow.
  /// *NumPromoted is the module-wide running total (for -max-counter-promotions).
  /// Returns true if at least one counter was promoted.
  bool run(int64_t *NumPromoted) {
    // Skip 'infinite' loops:
    if (ExitBlocks.size() == 0)
      return false;
    unsigned MaxProm = getMaxNumOfPromotionsInLoop(&L);
    if (MaxProm == 0)
      return false;

    unsigned Promoted = 0;
    for (auto &Cand : LoopToCandidates[&L]) {

      SmallVector<PHINode *, 4> NewPHIs;
      SSAUpdater SSA(&NewPHIs);
      Value *InitVal = ConstantInt::get(Cand.first->getType(), 0);

      // If BFI is set, we will use it to guide the promotions.
      if (BFI) {
        auto *BB = Cand.first->getParent();
        auto InstrCount = BFI->getBlockProfileCount(BB);
        if (!InstrCount)
          continue;
        auto PreheaderCount = BFI->getBlockProfileCount(L.getLoopPreheader());
        // If the average loop trip count is not greater than 1.5, we skip
        // promotion.
        if (PreheaderCount &&
            (PreheaderCount.getValue() * 3) >= (InstrCount.getValue() * 2))
          continue;
      }

      PGOCounterPromoterHelper Promoter(Cand.first, Cand.second, SSA, InitVal,
                                        L.getLoopPreheader(), ExitBlocks,
                                        InsertPts, LoopToCandidates, LI);
      Promoter.run(SmallVector<Instruction *, 2>({Cand.first, Cand.second}));
      Promoted++;
      // NOTE(review): when the per-loop MaxProm limit triggers, the loop
      // breaks before *NumPromoted is bumped for this promotion — confirm
      // whether that off-by-one in the global total is intentional.
      if (Promoted >= MaxProm)
        break;

      (*NumPromoted)++;
      if (MaxNumOfPromotions != -1 && *NumPromoted >= MaxNumOfPromotions)
        break;
    }

    LLVM_DEBUG(dbgs() << Promoted << " counters promoted for loop (depth="
                      << L.getLoopDepth() << ")\n");
    return Promoted != 0;
  }

private:
  // NOTE(review): parameter LP is unused here; the check is done on member L.
  bool allowSpeculativeCounterPromotion(Loop *LP) {
    SmallVector<BasicBlock *, 8> ExitingBlocks;
    L.getExitingBlocks(ExitingBlocks);
    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return true;
    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return false;
    return true;
  }

  // Returns the max number of Counter Promotions for LP.
  unsigned getMaxNumOfPromotionsInLoop(Loop *LP) {
    // We can't insert into a catchswitch.
    SmallVector<BasicBlock *, 8> LoopExitBlocks;
    LP->getExitBlocks(LoopExitBlocks);
    if (llvm::any_of(LoopExitBlocks, [](BasicBlock *Exit) {
          return isa<CatchSwitchInst>(Exit->getTerminator());
        }))
      return 0;

    if (!LP->hasDedicatedExits())
      return 0;

    BasicBlock *PH = LP->getLoopPreheader();
    if (!PH)
      return 0;

    SmallVector<BasicBlock *, 8> ExitingBlocks;
    LP->getExitingBlocks(ExitingBlocks);

    // If BFI is set, we do more aggressive promotions based on BFI.
    if (BFI)
      return (unsigned)-1;

    // Not considered speculative.
    if (ExitingBlocks.size() == 1)
      return MaxNumOfPromotionsPerLoop;

    if (ExitingBlocks.size() > SpeculativeCounterPromotionMaxExiting)
      return 0;

    // Whether the target block is in a loop does not matter:
    if (SpeculativeCounterPromotionToLoop)
      return MaxNumOfPromotionsPerLoop;

    // Now check the target block: budget is reduced (recursively) by the
    // candidates already pending in each enclosing target loop.
    unsigned MaxProm = MaxNumOfPromotionsPerLoop;
    for (auto *TargetBlock : LoopExitBlocks) {
      auto *TargetLoop = LI.getLoopFor(TargetBlock);
      if (!TargetLoop)
        continue;
      unsigned MaxPromForTarget = getMaxNumOfPromotionsInLoop(TargetLoop);
      unsigned PendingCandsInTarget = LoopToCandidates[TargetLoop].size();
      // max() guards the unsigned subtraction against wrapping below zero.
      MaxProm =
          std::min(MaxProm, std::max(MaxPromForTarget, PendingCandsInTarget) -
                                PendingCandsInTarget);
    }
    return MaxProm;
  }

  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> &LoopToCandidates;
  SmallVector<BasicBlock *, 8> ExitBlocks;
  SmallVector<Instruction *, 8> InsertPts;
  Loop &L;
  LoopInfo &LI;
  BlockFrequencyInfo *BFI;
};

} // end anonymous namespace

PreservedAnalyses InstrProfiling::run(Module &M, ModuleAnalysisManager &AM) {
  FunctionAnalysisManager &FAM =
      AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  auto GetTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };
  if (!run(M, GetTLI))
    return PreservedAnalyses::all();

  return PreservedAnalyses::none();
}

char InstrProfilingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
INITIALIZE_PASS_END(
    InstrProfilingLegacyPass, "instrprof",
    "Frontend instrumentation-based coverage lowering.", false, false)

ModulePass *
llvm::createInstrProfilingLegacyPass(const InstrProfOptions &Options,
                                     bool IsCS) {
  return new InstrProfilingLegacyPass(Options, IsCS);
}

/// Return \p Instr as an increment intrinsic, or null if it is neither
/// llvm.instrprof.increment nor llvm.instrprof.increment.step.
static InstrProfIncrementInst *castToIncrementInst(Instruction *Instr) {
  InstrProfIncrementInst *Inc = dyn_cast<InstrProfIncrementInstStep>(Instr);
  if (Inc)
    return Inc;
  return dyn_cast<InstrProfIncrementInst>(Instr);
}

/// Lower every instrprof increment / value-profile intrinsic in \p F, then
/// run counter promotion on the collected candidates. Returns true if any
/// intrinsic was lowered.
bool InstrProfiling::lowerIntrinsics(Function *F) {
  bool MadeChange = false;
  PromotionCandidates.clear();
  for (BasicBlock &BB : *F) {
    for (auto I = BB.begin(), E = BB.end(); I != E;) {
      // Advance before lowering: lowering erases the intrinsic instruction.
      auto Instr = I++;
      InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr);
      if (Inc) {
        lowerIncrement(Inc);
        MadeChange = true;
      } else if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(Instr)) {
        lowerValueProfileInst(Ind);
        MadeChange = true;
      }
    }
  }

  if (!MadeChange)
    return false;

  promoteCounterLoadStores(F);
  return true;
}

bool InstrProfiling::isRuntimeCounterRelocationEnabled() const {
  // An explicit command-line setting wins; otherwise only Fuchsia defaults on.
  if (RuntimeCounterRelocation.getNumOccurrences() > 0)
    return RuntimeCounterRelocation;

  return TT.isOSFuchsia();
}

bool InstrProfiling::isCounterPromotionEnabled() const {
  // An explicit command-line setting overrides the pass options.
  if (DoCounterPromotion.getNumOccurrences() > 0)
    return DoCounterPromotion;

  return Options.DoCounterPromotion;
}

/// Promote the counter load/store pairs recorded during lowering out of the
/// loops that contain them.
void InstrProfiling::promoteCounterLoadStores(Function *F) {
  if (!isCounterPromotionEnabled())
    return;

  DominatorTree DT(*F);
  LoopInfo LI(DT);
  DenseMap<Loop *, SmallVector<LoadStorePair, 8>> LoopPromotionCandidates;

  std::unique_ptr<BlockFrequencyInfo> BFI;
  if (Options.UseBFIInPromotion) {
    std::unique_ptr<BranchProbabilityInfo> BPI;
    BPI.reset(new BranchProbabilityInfo(*F, LI, &GetTLI(*F)));
    BFI.reset(new BlockFrequencyInfo(*F, *BPI, LI));
  }

  // Bucket each candidate pair by its innermost containing loop.
  for (const auto &LoadStore : PromotionCandidates) {
    auto *CounterLoad = LoadStore.first;
    auto *CounterStore = LoadStore.second;
    BasicBlock *BB = CounterLoad->getParent();
    Loop *ParentLoop = LI.getLoopFor(BB);
    if (!ParentLoop)
      continue;
    LoopPromotionCandidates[ParentLoop].emplace_back(CounterLoad, CounterStore);
  }

  SmallVector<Loop *, 4> Loops = LI.getLoopsInPreorder();

  // Do a post-order traversal of the loops so that counter updates can be
  // iteratively hoisted outside the loop nest.
  for (auto *Loop : llvm::reverse(Loops)) {
    PGOCounterPromoter Promoter(LoopPromotionCandidates, *Loop, LI, BFI.get());
    Promoter.run(&TotalCountersPromoted);
  }
}

/// Check if the module contains uses of any profiling intrinsics.
static bool containsProfilingIntrinsics(Module &M) {
  // A declaration may exist without uses; only live uses count as work.
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_increment_step)))
    if (!F->use_empty())
      return true;
  if (auto *F = M.getFunction(
          Intrinsic::getName(llvm::Intrinsic::instrprof_value_profile)))
    if (!F->use_empty())
      return true;
  return false;
}

/// Shared implementation for both pass managers: lower all profiling
/// intrinsics in \p M and emit the supporting data structures. Returns true
/// if the module was modified.
bool InstrProfiling::run(
    Module &M, std::function<const TargetLibraryInfo &(Function &F)> GetTLI) {
  this->M = &M;
  this->GetTLI = std::move(GetTLI);
  // Reset per-run state so the pass object can be reused across modules.
  NamesVar = nullptr;
  NamesSize = 0;
  ProfileDataMap.clear();
  UsedVars.clear();
  getMemOPSizeRangeFromOption(MemOPSizeRange, MemOPSizeRangeStart,
                              MemOPSizeRangeLast);
  TT = Triple(M.getTargetTriple());

  // Emit the runtime hook even if no counters are present.
  bool MadeChange = emitRuntimeHook();

  // Improve compile time by avoiding linear scans when there is no work.
  GlobalVariable *CoverageNamesVar =
      M.getNamedGlobal(getCoverageUnusedNamesVarName());
  if (!containsProfilingIntrinsics(M) && !CoverageNamesVar)
    return MadeChange;

  // We did not know how many value sites there would be inside
  // the instrumented function. This is counting the number of instrumented
  // target value sites to enter it as field in the profile data variable.
  for (Function &F : M) {
    InstrProfIncrementInst *FirstProfIncInst = nullptr;
    for (BasicBlock &BB : F)
      for (auto I = BB.begin(), E = BB.end(); I != E; I++)
        if (auto *Ind = dyn_cast<InstrProfValueProfileInst>(I))
          computeNumValueSiteCounts(Ind);
        else if (FirstProfIncInst == nullptr)
          FirstProfIncInst = dyn_cast<InstrProfIncrementInst>(I);

    // Value profiling intrinsic lowering requires per-function profile data
    // variable to be created first.
    if (FirstProfIncInst != nullptr)
      static_cast<void>(getOrCreateRegionCounters(FirstProfIncInst));
  }

  for (Function &F : M)
    MadeChange |= lowerIntrinsics(&F);

  if (CoverageNamesVar) {
    lowerCoverageData(CoverageNamesVar);
    MadeChange = true;
  }

  if (!MadeChange)
    return false;

  emitVNodes();
  emitNameData();
  emitRegistration();
  emitUses();
  emitInitialization();
  return true;
}

/// Declare (or find) the runtime's value-profiling callback. The parameter
/// lists are generated from InstrProfData.inc; \p IsRange selects the
/// range-profiling variant of the callback.
static FunctionCallee
getOrInsertValueProfilingCall(Module &M, const TargetLibraryInfo &TLI,
                              bool IsRange = false) {
  LLVMContext &Ctx = M.getContext();
  auto *ReturnTy = Type::getVoidTy(M.getContext());

  AttributeList AL;
  // Parameter 2 is the i32 counter index; it may need a sign/zero extension
  // attribute on some targets.
  if (auto AK = TLI.getExtAttrForI32Param(false))
    AL = AL.addParamAttribute(M.getContext(), 2, AK);

  if (!IsRange) {
    Type *ParamTypes[] = {
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
    };
    auto *ValueProfilingCallTy =
        FunctionType::get(ReturnTy, makeArrayRef(ParamTypes), false);
    return M.getOrInsertFunction(getInstrProfValueProfFuncName(),
                                 ValueProfilingCallTy, AL);
  } else {
    Type *RangeParamTypes[] = {
#define VALUE_RANGE_PROF 1
#define VALUE_PROF_FUNC_PARAM(ParamType, ParamName, ParamLLVMType) ParamLLVMType
#include "llvm/ProfileData/InstrProfData.inc"
#undef VALUE_RANGE_PROF
    };
    auto *ValueRangeProfilingCallTy =
        FunctionType::get(ReturnTy, makeArrayRef(RangeParamTypes), false);
    return M.getOrInsertFunction(getInstrProfValueRangeProfFuncName(),
                                 ValueRangeProfilingCallTy, AL);
  }
}

/// Record, per function-name variable and value kind, the number of value
/// profiling sites (the max site index seen plus one).
void InstrProfiling::computeNumValueSiteCounts(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  auto It = ProfileDataMap.find(Name);
  if (It == ProfileDataMap.end()) {
    PerFunctionProfileData PD;
    PD.NumValueSites[ValueKind] = Index + 1;
    ProfileDataMap[Name] = PD;
  } else if (It->second.NumValueSites[ValueKind] <= Index)
    It->second.NumValueSites[ValueKind] = Index + 1;
}

/// Replace one llvm.instrprof.value.profile intrinsic with a call into the
/// profiling runtime. Requires the per-function data variable to exist.
void InstrProfiling::lowerValueProfileInst(InstrProfValueProfileInst *Ind) {
  GlobalVariable *Name = Ind->getName();
  auto It = ProfileDataMap.find(Name);
  assert(It != ProfileDataMap.end() && It->second.DataVar &&
         "value profiling detected in function with no counter incerement");

  GlobalVariable *DataVar = It->second.DataVar;
  uint64_t ValueKind = Ind->getValueKind()->getZExtValue();
  uint64_t Index = Ind->getIndex()->getZExtValue();
  // Flatten (kind, per-kind index) into a single site index by skipping over
  // all sites of the preceding kinds.
  for (uint32_t Kind = IPVK_First; Kind < ValueKind; ++Kind)
    Index += It->second.NumValueSites[Kind];

  IRBuilder<> Builder(Ind);
  bool IsRange = (Ind->getValueKind()->getZExtValue() ==
                  llvm::InstrProfValueKind::IPVK_MemOPSize);
  CallInst *Call = nullptr;
  auto *TLI = &GetTLI(*Ind->getFunction());
  if (!IsRange) {
    Value *Args[3] = {Ind->getTargetValue(),
                      Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
                      Builder.getInt32(Index)};
    Call = Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI), Args);
  } else {
    // INT64_MIN signals "large-value bucketing disabled" to the runtime.
    Value *Args[6] = {
        Ind->getTargetValue(),
        Builder.CreateBitCast(DataVar, Builder.getInt8PtrTy()),
        Builder.getInt32(Index),
        Builder.getInt64(MemOPSizeRangeStart),
        Builder.getInt64(MemOPSizeRangeLast),
        Builder.getInt64(MemOPSizeLarge == 0 ? INT64_MIN : MemOPSizeLarge)};
    Call =
        Builder.CreateCall(getOrInsertValueProfilingCall(*M, *TLI, true), Args);
  }
  if (auto AK = TLI->getExtAttrForI32Param(false))
    Call->addParamAttr(2, AK);
  Ind->replaceAllUsesWith(Call);
  Ind->eraseFromParent();
}

/// Replace one llvm.instrprof.increment(.step) intrinsic with a direct
/// (or atomic) update of the corresponding slot in the counters array.
void InstrProfiling::lowerIncrement(InstrProfIncrementInst *Inc) {
  GlobalVariable *Counters = getOrCreateRegionCounters(Inc);

  IRBuilder<> Builder(Inc);
  uint64_t Index = Inc->getIndex()->getZExtValue();
  Value *Addr = Builder.CreateConstInBoundsGEP2_64(Counters->getValueType(),
                                                   Counters, 0, Index);

  if (isRuntimeCounterRelocationEnabled()) {
    Type *Int64Ty = Type::getInt64Ty(M->getContext());
    Type *Int64PtrTy = Type::getInt64PtrTy(M->getContext());
    Function *Fn = Inc->getParent()->getParent();
    Instruction &I = Fn->getEntryBlock().front();
    // NOTE(review): this assumes that if the entry block's first instruction
    // is a LoadInst, it is the bias load emitted by a previous call to this
    // function — confirm no other pass can place a load there first.
    LoadInst *LI = dyn_cast<LoadInst>(&I);
    if (!LI) {
      IRBuilder<> Builder(&I);
      // NOTE(review): this inner Int64Ty shadows the one above (harmless,
      // same type, but worth cleaning up).
      Type *Int64Ty = Type::getInt64Ty(M->getContext());
      GlobalVariable *Bias = M->getGlobalVariable(getInstrProfCounterBiasVarName());
      if (!Bias)
        Bias = new GlobalVariable(*M, Int64Ty, false, GlobalValue::LinkOnceODRLinkage,
                                  Constant::getNullValue(Int64Ty),
                                  getInstrProfCounterBiasVarName());
      LI = Builder.CreateLoad(Int64Ty, Bias);
    }
    // Rebase the counter address by the runtime-provided bias.
    auto *Add = Builder.CreateAdd(Builder.CreatePtrToInt(Addr, Int64Ty), LI);
    Addr = Builder.CreateIntToPtr(Add, Int64PtrTy);
  }

  if (Options.Atomic || AtomicCounterUpdateAll) {
    Builder.CreateAtomicRMW(AtomicRMWInst::Add, Addr, Inc->getStep(),
                            AtomicOrdering::Monotonic);
  } else {
    Value *IncStep = Inc->getStep();
    Value *Load = Builder.CreateLoad(IncStep->getType(), Addr, "pgocount");
    auto *Count = Builder.CreateAdd(Load, Inc->getStep());
    auto *Store = Builder.CreateStore(Count, Addr);
    // Remember the pair so promoteCounterLoadStores() can hoist it later.
    if (isCounterPromotionEnabled())
      PromotionCandidates.emplace_back(cast<Instruction>(Load), Store);
  }
  Inc->eraseFromParent();
}

/// Collect the name globals referenced by the coverage-unused-names variable
/// and delete the variable itself; the names are emitted by emitNameData().
void InstrProfiling::lowerCoverageData(GlobalVariable *CoverageNamesVar) {
  ConstantArray *Names =
      cast<ConstantArray>(CoverageNamesVar->getInitializer());
  for (unsigned I = 0, E = Names->getNumOperands(); I < E; ++I) {
    Constant *NC = Names->getOperand(I);
    Value *V = NC->stripPointerCasts();
    assert(isa<GlobalVariable>(V) && "Missing reference to function name");
    GlobalVariable *Name = cast<GlobalVariable>(V);

    Name->setLinkage(GlobalValue::PrivateLinkage);
    ReferencedNames.push_back(Name);
    NC->dropAllReferences();
  }
  CoverageNamesVar->eraseFromParent();
}

/// Get the name of a profiling variable for a particular function.
static std::string getVarName(InstrProfIncrementInst *Inc, StringRef Prefix) {
  StringRef NamePrefix = getInstrProfNameVarPrefix();
  StringRef Name = Inc->getName()->getName().substr(NamePrefix.size());
  Function *F = Inc->getParent()->getParent();
  Module *M = F->getParent();
  if (!DoHashBasedCounterSplit || !isIRPGOFlagSet(M) ||
      !canRenameComdatFunc(*F))
    return (Prefix + Name).str();
  // Append the CFG hash so differently-hashed comdat copies get distinct
  // counter variables — unless the name already carries the hash suffix.
  uint64_t FuncHash = Inc->getHash()->getZExtValue();
  SmallVector<char, 24> HashPostfix;
  if (Name.endswith((Twine(".") + Twine(FuncHash)).toStringRef(HashPostfix)))
    return (Prefix + Name).str();
  return (Prefix + Name + "." + Twine(FuncHash)).str();
}

/// Decide whether the function's address should be stored in its profile
/// data record (used for indirect-call target mapping).
static inline bool shouldRecordFunctionAddr(Function *F) {
  // Check the linkage
  bool HasAvailableExternallyLinkage = F->hasAvailableExternallyLinkage();
  if (!F->hasLinkOnceLinkage() && !F->hasLocalLinkage() &&
      !HasAvailableExternallyLinkage)
    return true;

  // A function marked 'alwaysinline' with available_externally linkage can't
  // have its address taken. Doing so would create an undefined external ref to
  // the function, which would fail to link.
  if (HasAvailableExternallyLinkage &&
      F->hasFnAttribute(Attribute::AlwaysInline))
    return false;

  // Prohibit function address recording if the function is both internal and
  // COMDAT. This avoids the profile data variable referencing internal symbols
  // in COMDAT.
  if (F->hasLocalLinkage() && F->hasComdat())
    return false;

  // Check uses of this function for other than direct calls or invokes to it.
  // Inline virtual functions have linkOnceODR linkage. When a key method
  // exists, the vtable will only be emitted in the TU where the key method
  // is defined. In a TU where vtable is not available, the function won't
  // be 'addresstaken'. If its address is not recorded here, the profile data
  // with missing address may be picked by the linker leading to missing
  // indirect call target info.
  return F->hasAddressTaken() || F->hasLinkOnceLinkage();
}

/// Platforms without linker/section magic must register the profile sections
/// with the runtime at startup.
static bool needsRuntimeRegistrationOfSectionRange(const Triple &TT) {
  // Don't do this for Darwin. compiler-rt uses linker magic.
  if (TT.isOSDarwin())
    return false;
  // Use linker script magic to get data/cnts/name start/end.
  if (TT.isOSLinux() || TT.isOSFreeBSD() || TT.isOSNetBSD() ||
      TT.isOSSolaris() || TT.isOSFuchsia() || TT.isPS4CPU() ||
      TT.isOSWindows())
    return false;

  return true;
}

/// Create (once per function) the counters array plus the per-function data
/// variable (and, if statically allocated, the value-profile pointer array).
/// Returns the counters variable.
GlobalVariable *
InstrProfiling::getOrCreateRegionCounters(InstrProfIncrementInst *Inc) {
  GlobalVariable *NamePtr = Inc->getName();
  auto It = ProfileDataMap.find(NamePtr);
  PerFunctionProfileData PD;
  if (It != ProfileDataMap.end()) {
    if (It->second.RegionCounters)
      return It->second.RegionCounters;
    PD = It->second;
  }

  // Match the linkage and visibility of the name global. COFF supports using
  // comdats with internal symbols, so do that if we can.
  Function *Fn = Inc->getParent()->getParent();
  GlobalValue::LinkageTypes Linkage = NamePtr->getLinkage();
  GlobalValue::VisibilityTypes Visibility = NamePtr->getVisibility();
  if (TT.isOSBinFormatCOFF()) {
    Linkage = GlobalValue::InternalLinkage;
    Visibility = GlobalValue::DefaultVisibility;
  }

  // Move the name variable to the right section. Place them in a COMDAT group
  // if the associated function is a COMDAT. This will make sure that only one
  // copy of counters of the COMDAT function will be emitted after linking. Keep
  // in mind that this pass may run before the inliner, so we need to create a
  // new comdat group for the counters and profiling data. If we use the comdat
  // of the parent function, that will result in relocations against discarded
  // sections.
  bool NeedComdat = needsComdatForCounter(*Fn, *M);
  if (NeedComdat) {
    if (TT.isOSBinFormatCOFF()) {
      // For COFF, put the counters, data, and values each into their own
      // comdats. We can't use a group because the Visual C++ linker will
      // report duplicate symbol errors if there are multiple external symbols
      // with the same name marked IMAGE_COMDAT_SELECT_ASSOCIATIVE.
      Linkage = GlobalValue::LinkOnceODRLinkage;
      Visibility = GlobalValue::HiddenVisibility;
    }
  }
  auto MaybeSetComdat = [=](GlobalVariable *GV) {
    if (NeedComdat)
      GV->setComdat(M->getOrInsertComdat(GV->getName()));
  };

  uint64_t NumCounters = Inc->getNumCounters()->getZExtValue();
  LLVMContext &Ctx = M->getContext();
  ArrayType *CounterTy = ArrayType::get(Type::getInt64Ty(Ctx), NumCounters);

  // Create the counters variable.
  auto *CounterPtr =
      new GlobalVariable(*M, CounterTy, false, Linkage,
                         Constant::getNullValue(CounterTy),
                         getVarName(Inc, getInstrProfCountersVarPrefix()));
  CounterPtr->setVisibility(Visibility);
  CounterPtr->setSection(
      getInstrProfSectionName(IPSK_cnts, TT.getObjectFormat()));
  CounterPtr->setAlignment(Align(8));
  MaybeSetComdat(CounterPtr);
  CounterPtr->setLinkage(Linkage);

  auto *Int8PtrTy = Type::getInt8PtrTy(Ctx);
  // Allocate statically the array of pointers to value profile nodes for
  // the current function.
  Constant *ValuesPtrExpr = ConstantPointerNull::get(Int8PtrTy);
  if (ValueProfileStaticAlloc && !needsRuntimeRegistrationOfSectionRange(TT)) {
    uint64_t NS = 0;
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      NS += PD.NumValueSites[Kind];
    if (NS) {
      ArrayType *ValuesTy = ArrayType::get(Type::getInt64Ty(Ctx), NS);

      auto *ValuesVar =
          new GlobalVariable(*M, ValuesTy, false, Linkage,
                             Constant::getNullValue(ValuesTy),
                             getVarName(Inc, getInstrProfValuesVarPrefix()));
      ValuesVar->setVisibility(Visibility);
      ValuesVar->setSection(
          getInstrProfSectionName(IPSK_vals, TT.getObjectFormat()));
      ValuesVar->setAlignment(Align(8));
      MaybeSetComdat(ValuesVar);
      ValuesPtrExpr =
          ConstantExpr::getBitCast(ValuesVar, Type::getInt8PtrTy(Ctx));
    }
  }

  // Create data variable. Its field types and initializers come from
  // INSTR_PROF_DATA in InstrProfData.inc (which references the locals
  // declared above, e.g. FunctionAddr, ValuesPtrExpr, Int16ArrayVals).
  auto *Int16Ty = Type::getInt16Ty(Ctx);
  auto *Int16ArrayTy = ArrayType::get(Int16Ty, IPVK_Last + 1);
  Type *DataTypes[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *DataTy = StructType::get(Ctx, makeArrayRef(DataTypes));

  Constant *FunctionAddr = shouldRecordFunctionAddr(Fn)
                               ? ConstantExpr::getBitCast(Fn, Int8PtrTy)
                               : ConstantPointerNull::get(Int8PtrTy);

  Constant *Int16ArrayVals[IPVK_Last + 1];
  for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
    Int16ArrayVals[Kind] = ConstantInt::get(Int16Ty, PD.NumValueSites[Kind]);

  Constant *DataVals[] = {
#define INSTR_PROF_DATA(Type, LLVMType, Name, Init) Init,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *Data = new GlobalVariable(*M, DataTy, false, Linkage,
                                  ConstantStruct::get(DataTy, DataVals),
                                  getVarName(Inc, getInstrProfDataVarPrefix()));
  Data->setVisibility(Visibility);
  Data->setSection(getInstrProfSectionName(IPSK_data, TT.getObjectFormat()));
  Data->setAlignment(Align(INSTR_PROF_DATA_ALIGNMENT));
  MaybeSetComdat(Data);
  Data->setLinkage(Linkage);

  PD.RegionCounters = CounterPtr;
  PD.DataVar = Data;
  ProfileDataMap[NamePtr] = PD;

  // Mark the data variable as used so that it isn't stripped out.
  UsedVars.push_back(Data);
  // Now that the linkage set by the FE has been passed to the data and counter
  // variables, reset Name variable's linkage and visibility to private so that
  // it can be removed later by the compiler.
  NamePtr->setLinkage(GlobalValue::PrivateLinkage);
  // Collect the referenced names to be used by emitNameData.
  ReferencedNames.push_back(NamePtr);

  return CounterPtr;
}

/// Emit the statically allocated pool of value-profile nodes shared by all
/// value sites in the module.
void InstrProfiling::emitVNodes() {
  if (!ValueProfileStaticAlloc)
    return;

  // For now only support this on platforms that do
  // not require runtime registration to discover
  // named section start/end.
  if (needsRuntimeRegistrationOfSectionRange(TT))
    return;

  size_t TotalNS = 0;
  for (auto &PD : ProfileDataMap) {
    for (uint32_t Kind = IPVK_First; Kind <= IPVK_Last; ++Kind)
      TotalNS += PD.second.NumValueSites[Kind];
  }

  if (!TotalNS)
    return;

  uint64_t NumCounters = TotalNS * NumCountersPerValueSite;
  // Heuristic for small programs with very few total value sites.
  // The default value of vp-counters-per-site is chosen based on
  // the observation that large apps usually have a low percentage
  // of value sites that actually have any profile data, and thus
  // the average number of counters per site is low. For small
  // apps with very few sites, this may not be true. Bump up the
  // number of counters in this case.
#define INSTR_PROF_MIN_VAL_COUNTS 10
  if (NumCounters < INSTR_PROF_MIN_VAL_COUNTS)
    NumCounters = std::max(INSTR_PROF_MIN_VAL_COUNTS, (int)NumCounters * 2);

  auto &Ctx = M->getContext();
  Type *VNodeTypes[] = {
#define INSTR_PROF_VALUE_NODE(Type, LLVMType, Name, Init) LLVMType,
#include "llvm/ProfileData/InstrProfData.inc"
  };
  auto *VNodeTy = StructType::get(Ctx, makeArrayRef(VNodeTypes));

  ArrayType *VNodesTy = ArrayType::get(VNodeTy, NumCounters);
  auto *VNodesVar = new GlobalVariable(
      *M, VNodesTy, false, GlobalValue::PrivateLinkage,
      Constant::getNullValue(VNodesTy), getInstrProfVNodesVarName());
  VNodesVar->setSection(
      getInstrProfSectionName(IPSK_vnodes, TT.getObjectFormat()));
  UsedVars.push_back(VNodesVar);
}

/// Emit the (optionally compressed) function-name string table and delete
/// the individual name globals it replaces.
void InstrProfiling::emitNameData() {
  std::string UncompressedData;

  if (ReferencedNames.empty())
    return;

  std::string CompressedNameStr;
  if (Error E = collectPGOFuncNameStrings(ReferencedNames, CompressedNameStr,
                                          DoNameCompression)) {
    report_fatal_error(toString(std::move(E)), false);
  }

  auto &Ctx = M->getContext();
  auto *NamesVal = ConstantDataArray::getString(
      Ctx, StringRef(CompressedNameStr), false);
  NamesVar = new GlobalVariable(*M, NamesVal->getType(), true,
                                GlobalValue::PrivateLinkage, NamesVal,
                                getInstrProfNamesVarName());
  NamesSize = CompressedNameStr.size();
  NamesVar->setSection(
      getInstrProfSectionName(IPSK_name, TT.getObjectFormat()));
  // On COFF, it's important to reduce the alignment down to 1 to prevent the
  // linker from inserting padding before the start of the names section or
  // between names entries.
  NamesVar->setAlignment(Align::None());
  UsedVars.push_back(NamesVar);

  for (auto *NamePtr : ReferencedNames)
    NamePtr->eraseFromParent();
}

// NOTE(review): the remainder of emitRegistration() is truncated in this
// chunk of the file; the text below is reproduced exactly as far as it goes.
void InstrProfiling::emitRegistration() {
  if (!needsRuntimeRegistrationOfSectionRange(TT))
    return;

  // Construct the function.
  auto *VoidTy = Type::getVoidTy(M->getContext());
  auto *VoidPtrTy = Type::getInt8PtrTy(M->getContext());
  auto *Int64Ty = Type::getInt64Ty(M->getContext());
  auto *RegisterFTy = FunctionType::get(VoidTy, false);
  auto *RegisterF = Function::Create(RegisterFTy, GlobalValue::InternalLinkage,
                                     getInstrProfRegFuncsName(), M);
  RegisterF->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  if (Options.NoRedZone)
    RegisterF->addFnAttr(Attribute::NoRedZone);

  auto *RuntimeRegisterTy = FunctionType::get(VoidTy, VoidPtrTy, false);
  auto *RuntimeRegisterF =
      Function::Create(RuntimeRegisterTy, GlobalVariable::ExternalLinkage,
                       getInstrProfRegFuncName(), M);

  IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", RegisterF));
  for (Value *Data : UsedVars)
    if (Data != NamesVar && !isa<Function>(Data))
      IRB.CreateCall(RuntimeRegisterF, IRB.CreateBitCast(Data, VoidPtrTy));

  if (NamesVar) {
    Type *ParamTypes[] = {VoidPtrTy, Int64Ty};
    auto *NamesRegisterTy =
        FunctionType::get(VoidTy, makeArrayRef(ParamTypes), false);
    auto *NamesRegisterF =
Function::Create(NamesRegisterTy, GlobalVariable::ExternalLinkage, 1005 getInstrProfNamesRegFuncName(), M); 1006 IRB.CreateCall(NamesRegisterF, {IRB.CreateBitCast(NamesVar, VoidPtrTy), 1007 IRB.getInt64(NamesSize)}); 1008 } 1009 1010 IRB.CreateRetVoid(); 1011 } 1012 1013 bool InstrProfiling::emitRuntimeHook() { 1014 // We expect the linker to be invoked with -u<hook_var> flag for linux, 1015 // for which case there is no need to emit the user function. 1016 if (TT.isOSLinux()) 1017 return false; 1018 1019 // If the module's provided its own runtime, we don't need to do anything. 1020 if (M->getGlobalVariable(getInstrProfRuntimeHookVarName())) 1021 return false; 1022 1023 // Declare an external variable that will pull in the runtime initialization. 1024 auto *Int32Ty = Type::getInt32Ty(M->getContext()); 1025 auto *Var = 1026 new GlobalVariable(*M, Int32Ty, false, GlobalValue::ExternalLinkage, 1027 nullptr, getInstrProfRuntimeHookVarName()); 1028 1029 // Make a function that uses it. 1030 auto *User = Function::Create(FunctionType::get(Int32Ty, false), 1031 GlobalValue::LinkOnceODRLinkage, 1032 getInstrProfRuntimeHookVarUseFuncName(), M); 1033 User->addFnAttr(Attribute::NoInline); 1034 if (Options.NoRedZone) 1035 User->addFnAttr(Attribute::NoRedZone); 1036 User->setVisibility(GlobalValue::HiddenVisibility); 1037 if (TT.supportsCOMDAT()) 1038 User->setComdat(M->getOrInsertComdat(User->getName())); 1039 1040 IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", User)); 1041 auto *Load = IRB.CreateLoad(Int32Ty, Var); 1042 IRB.CreateRet(Load); 1043 1044 // Mark the user variable as used so that it isn't stripped out. 1045 UsedVars.push_back(User); 1046 return true; 1047 } 1048 1049 void InstrProfiling::emitUses() { 1050 if (!UsedVars.empty()) 1051 appendToUsed(*M, UsedVars); 1052 } 1053 1054 void InstrProfiling::emitInitialization() { 1055 // Create ProfileFileName variable. 
Don't don't this for the 1056 // context-sensitive instrumentation lowering: This lowering is after 1057 // LTO/ThinLTO linking. Pass PGOInstrumentationGenCreateVar should 1058 // have already create the variable before LTO/ThinLTO linking. 1059 if (!IsCS) 1060 createProfileFileNameVar(*M, Options.InstrProfileOutput); 1061 Function *RegisterF = M->getFunction(getInstrProfRegFuncsName()); 1062 if (!RegisterF) 1063 return; 1064 1065 // Create the initialization function. 1066 auto *VoidTy = Type::getVoidTy(M->getContext()); 1067 auto *F = Function::Create(FunctionType::get(VoidTy, false), 1068 GlobalValue::InternalLinkage, 1069 getInstrProfInitFuncName(), M); 1070 F->setUnnamedAddr(GlobalValue::UnnamedAddr::Global); 1071 F->addFnAttr(Attribute::NoInline); 1072 if (Options.NoRedZone) 1073 F->addFnAttr(Attribute::NoRedZone); 1074 1075 // Add the basic block and the necessary calls. 1076 IRBuilder<> IRB(BasicBlock::Create(M->getContext(), "", F)); 1077 IRB.CreateCall(RegisterF, {}); 1078 IRB.CreateRetVoid(); 1079 1080 appendToGlobalCtors(*M, F, 0); 1081 } 1082