//===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements code cost measurement utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/InstructionCost.h"

#define DEBUG_TYPE "code-metrics"

using namespace llvm;
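
/// Append to \p Worklist the operands of \p V that are safe to speculatively
/// execute and have not yet been visited; these are the candidates that may
/// turn out to be ephemeral (alive only to feed llvm.assume).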
static void
appendSpeculatableOperands(const Value *V,
                           SmallPtrSetImpl<const Value *> &Visited,
                           SmallVectorImpl<const Value *> &Worklist) {
  const User *U = dyn_cast<User>(V);
  if (!U)
    return;

  for (const Value *Operand : U->operands())
    if (Visited.insert(Operand).second)
      if (isSafeToSpeculativelyExecute(Operand))
        Worklist.push_back(Operand);
}
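
/// Grow \p EphValues to a fixed point: a worklist entry becomes ephemeral
/// once all of its users are known to be ephemeral, at which point its
/// speculatable operands are queued for the same check.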
static void completeEphemeralValues(SmallPtrSetImpl<const Value *> &Visited,
                                    SmallVectorImpl<const Value *> &Worklist,
                                    SmallPtrSetImpl<const Value *> &EphValues) {
  // Note: We don't speculate PHIs here, so we'll miss instruction chains kept
  // alive only by ephemeral values.

  // Walk the worklist using an index but without caching the size so we can
  // append more entries as we process the worklist. This forms a queue without
  // quadratic behavior by just leaving processed nodes at the head of the
  // worklist forever.
  for (int i = 0; i < (int)Worklist.size(); ++i) {
    const Value *V = Worklist[i];

    assert(Visited.count(V) &&
           "Failed to add a worklist entry to our visited set!");

    // If all uses of this value are ephemeral, then so is this value.
    if (!all_of(V->users(), [&](const User *U) { return EphValues.count(U); }))
      continue;

    EphValues.insert(V);
    LLVM_DEBUG(dbgs() << "Ephemeral Value: " << *V << "\n");

    // Append any more operands to consider.
    appendSpeculatableOperands(V, Visited, Worklist);
  }
}

// Find all ephemeral values in the loop: values used only to feed
// @llvm.assume (directly or transitively), which therefore generate no real
// code and should be ignored when measuring size.
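//
// For example (illustrative IR), %cmp below is ephemeral because its only
// user is the assume call, so it costs nothing in the final code:
//
//   %cmp = icmp ult i32 %i, %n
//   call void @llvm.assume(i1 %cmp)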
void CodeMetrics::collectEphemeralValues(
    const Loop *L, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;
  SmallVector<const Value *, 16> Worklist;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);

    // Filter out call sites outside of the loop so we don't do a function's
    // worth of work for each of its loops (and, in the common case, ephemeral
    // values in the loop are likely due to @llvm.assume calls in the loop).
    if (!L->contains(I->getParent()))
      continue;

    if (EphValues.insert(I).second)
      appendSpeculatableOperands(I, Visited, Worklist);
  }

  completeEphemeralValues(Visited, Worklist, EphValues);
}
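
// As above, but seed the search with every assumption in the function rather
// than only those within a given loop.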
void CodeMetrics::collectEphemeralValues(
    const Function *F, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;
  SmallVector<const Value *, 16> Worklist;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);
    assert(I->getParent()->getParent() == F &&
           "Found assumption for the wrong function!");

    if (EphValues.insert(I).second)
      appendSpeculatableOperands(I, Visited, Worklist);
  }

  completeEphemeralValues(Visited, Worklist, EphValues);
}
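
// A minimal usage sketch (illustrative only; the pass boilerplate, and names
// like L, AC, and TTI, are assumed to be in scope):
//
//   SmallPtrSet<const Value *, 32> EphValues;
//   CodeMetrics::collectEphemeralValues(L, &AC, EphValues);
//   CodeMetrics Metrics;
//   for (BasicBlock *BB : L->blocks())
//     Metrics.analyzeBasicBlock(BB, TTI, EphValues);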
/// Fill in the current structure with information gleaned from the specified
/// block.
void CodeMetrics::analyzeBasicBlock(
    const BasicBlock *BB, const TargetTransformInfo &TTI,
    const SmallPtrSetImpl<const Value *> &EphValues, bool PrepareForLTO) {
  ++NumBlocks;
  // Use a proxy variable for NumInsts of type InstructionCost, so that it can
  // use InstructionCost's arithmetic properties such as saturation when this
  // feature is added to InstructionCost.
  // When storing the value back to NumInsts, we can assume all costs are
  // Valid because the IR should not contain any nodes that cannot be costed;
  // if it does, the cost model is broken.
  InstructionCost NumInstsProxy = NumInsts;
  InstructionCost NumInstsBeforeThisBB = NumInsts;
  for (const Instruction &I : *BB) {
    // Skip ephemeral values.
    if (EphValues.count(&I))
      continue;

    // Special handling for calls.
    if (const auto *Call = dyn_cast<CallBase>(&I)) {
      if (const Function *F = Call->getCalledFunction()) {
        bool IsLoweredToCall = TTI.isLoweredToCall(F);
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        // When preparing for LTO, liberally consider calls as inline
        // candidates.
        if (!Call->isNoInline() && IsLoweredToCall &&
            ((F->hasInternalLinkage() && F->hasOneUse()) || PrepareForLTO)) {
          ++NumInlineCandidates;
        }

        // If this call is to the function itself, then the function is
        // recursive. Inlining it into other functions is a bad idea, because
        // this is basically just a form of loop peeling, and our metrics
        // aren't useful for that case.
        if (F == BB->getParent())
          isRecursive = true;

        if (IsLoweredToCall)
          ++NumCalls;
      } else {
        // We don't want inline asm to count as a call - that would prevent
        // loop unrolling. The argument setup cost is still real, though.
        if (!Call->isInlineAsm())
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }
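
    // Track vector instructions: clients of these metrics may weight
    // vector-heavy code differently.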
    if (isa<ExtractElementInst>(I) || I.getType()->isVectorTy())
      ++NumVectorInsts;
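
    // A token produced in this block but used outside of it pins the block:
    // token values cannot be passed through PHI nodes, so the block cannot
    // be duplicated.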
    if (I.getType()->isTokenTy() && I.isUsedOutsideOfBlock(BB))
      notDuplicatable = true;

    if (const CallInst *CI = dyn_cast<CallInst>(&I)) {
      if (CI->cannotDuplicate())
        notDuplicatable = true;
      if (CI->isConvergent())
        convergent = true;
    }

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(&I))
      if (InvI->cannotDuplicate())
        notDuplicatable = true;
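
    // Accumulate the target's code-size cost for this instruction; these
    // metrics estimate static size, hence TCK_CodeSize rather than a
    // latency-based cost kind.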
    NumInstsProxy += TTI.getUserCost(&I, TargetTransformInfo::TCK_CodeSize);
    NumInsts = *NumInstsProxy.getValue();
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr. Inlining
  // one would be incorrect because any blockaddress (in a static global
  // initializer, for example) would still refer to the original function,
  // and the indirect jump would jump from the inlined copy of the function
  // into the original function, which is undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function. And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  InstructionCost NumInstsThisBB = NumInstsProxy - NumInstsBeforeThisBB;
  NumBBInsts[BB] = *NumInstsThisBB.getValue();
}