1 //===- CodeMetrics.cpp - Code cost measurements ---------------------------===// 2 // 3 // The LLVM Compiler Infrastructure 4 // 5 // This file is distributed under the University of Illinois Open Source 6 // License. See LICENSE.TXT for details. 7 // 8 //===----------------------------------------------------------------------===// 9 // 10 // This file implements code cost measurement utilities. 11 // 12 //===----------------------------------------------------------------------===// 13 14 #include "llvm/Analysis/CodeMetrics.h" 15 #include "llvm/Function.h" 16 #include "llvm/Support/CallSite.h" 17 #include "llvm/IntrinsicInst.h" 18 #include "llvm/Target/TargetData.h" 19 20 using namespace llvm; 21 22 /// callIsSmall - If a call is likely to lower to a single target instruction, 23 /// or is otherwise deemed small return true. 24 /// TODO: Perhaps calls like memcpy, strcpy, etc? 25 bool llvm::callIsSmall(const Function *F) { 26 if (!F) return false; 27 28 if (F->hasLocalLinkage()) return false; 29 30 if (!F->hasName()) return false; 31 32 StringRef Name = F->getName(); 33 34 // These will all likely lower to a single selection DAG node. 35 if (Name == "copysign" || Name == "copysignf" || Name == "copysignl" || 36 Name == "fabs" || Name == "fabsf" || Name == "fabsl" || 37 Name == "sin" || Name == "sinf" || Name == "sinl" || 38 Name == "cos" || Name == "cosf" || Name == "cosl" || 39 Name == "sqrt" || Name == "sqrtf" || Name == "sqrtl" ) 40 return true; 41 42 // These are all likely to be optimized into something smaller. 
43 if (Name == "pow" || Name == "powf" || Name == "powl" || 44 Name == "exp2" || Name == "exp2l" || Name == "exp2f" || 45 Name == "floor" || Name == "floorf" || Name == "ceil" || 46 Name == "round" || Name == "ffs" || Name == "ffsl" || 47 Name == "abs" || Name == "labs" || Name == "llabs") 48 return true; 49 50 return false; 51 } 52 53 bool llvm::isInstructionFree(const Instruction *I, const TargetData *TD) { 54 if (isa<PHINode>(I)) 55 return true; 56 57 // If a GEP has all constant indices, it will probably be folded with 58 // a load/store. 59 if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(I)) 60 return GEP->hasAllConstantIndices(); 61 62 if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) { 63 switch (II->getIntrinsicID()) { 64 default: 65 return false; 66 case Intrinsic::dbg_declare: 67 case Intrinsic::dbg_value: 68 case Intrinsic::invariant_start: 69 case Intrinsic::invariant_end: 70 case Intrinsic::lifetime_start: 71 case Intrinsic::lifetime_end: 72 case Intrinsic::objectsize: 73 case Intrinsic::ptr_annotation: 74 case Intrinsic::var_annotation: 75 // These intrinsics don't count as size. 76 return true; 77 } 78 } 79 80 if (const CastInst *CI = dyn_cast<CastInst>(I)) { 81 // Noop casts, including ptr <-> int, don't count. 82 if (CI->isLosslessCast() || isa<IntToPtrInst>(CI) || isa<PtrToIntInst>(CI)) 83 return true; 84 // trunc to a native type is free (assuming the target has compare and 85 // shift-right of the same width). 86 if (TD && isa<TruncInst>(CI) && 87 TD->isLegalInteger(TD->getTypeSizeInBits(CI->getType()))) 88 return true; 89 // Result of a cmp instruction is often extended (to be used by other 90 // cmp instructions, logical or return instructions). These are usually 91 // nop on most sane targets. 92 if (isa<CmpInst>(CI->getOperand(0))) 93 return true; 94 } 95 96 return false; 97 } 98 99 /// analyzeBasicBlock - Fill in the current structure with information gleaned 100 /// from the specified block. 
void CodeMetrics::analyzeBasicBlock(const BasicBlock *BB,
                                    const TargetData *TD) {
  ++NumBlocks;
  // Snapshot the running instruction count so the per-block size can be
  // recorded as a delta at the end of this function.
  unsigned NumInstsBeforeThisBB = NumInsts;
  for (BasicBlock::const_iterator II = BB->begin(), E = BB->end();
       II != E; ++II) {
    // Free instructions (PHIs, foldable GEPs, debug intrinsics, noop casts)
    // contribute nothing to any of the counters.
    if (isInstructionFree(II, TD))
      continue;

    // Special handling for calls.
    if (isa<CallInst>(II) || isa<InvokeInst>(II)) {
      ImmutableCallSite CS(cast<Instruction>(II));

      if (const Function *F = CS.getCalledFunction()) {
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        if (!CS.isNoInline() && F->hasInternalLinkage() && F->hasOneUse())
          ++NumInlineCandidates;

        // If this call is to function itself, then the function is recursive.
        // Inlining it into other functions is a bad idea, because this is
        // basically just a form of loop peeling, and our metrics aren't useful
        // for that case.
        if (F == BB->getParent())
          isRecursive = true;
      }

      // Intrinsics and "small" calls (single-instruction libm lowerings) are
      // not charged as real calls.
      if (!isa<IntrinsicInst>(II) && !callIsSmall(CS.getCalledFunction())) {
        // Each argument to a call takes on average one instruction to set up.
        NumInsts += CS.arg_size();

        // We don't want inline asm to count as a call - that would prevent loop
        // unrolling. The argument setup cost is still real, though.
        if (!isa<InlineAsm>(CS.getCalledValue()))
          ++NumCalls;
      }
    }

    // Dynamic (non-entry-block / variable-size) allocas poison inlining and
    // unrolling decisions, so flag them.
    if (const AllocaInst *AI = dyn_cast<AllocaInst>(II)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(II) || II->getType()->isVectorTy())
      ++NumVectorInsts;

    ++NumInsts;
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this indirect
  // jump would jump from the inlined copy of the function into the original
  // function which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function. And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  if (isa<IndirectBrInst>(BB->getTerminator()))
    containsIndirectBr = true;

  // Remember NumInsts for this BB.
  NumBBInsts[BB] = NumInsts - NumInstsBeforeThisBB;
}

/// analyzeFunction - Accumulate metrics for every basic block in \p F by
/// repeatedly invoking analyzeBasicBlock.
void CodeMetrics::analyzeFunction(Function *F, const TargetData *TD) {
  // If this function contains a call that "returns twice" (e.g., setjmp or
  // _setjmp) and it isn't marked with "returns twice" itself, never inline it.
  // This is a hack because we depend on the user marking their local variables
  // as volatile if they are live across a setjmp call, and they probably
  // won't do this in callers.
  exposesReturnsTwice = F->callsFunctionThatReturnsTwice() &&
    !F->hasFnAttr(Attribute::ReturnsTwice);

  // Look at the size of the callee.
  for (Function::const_iterator BB = F->begin(), E = F->end(); BB != E; ++BB)
    analyzeBasicBlock(&*BB, TD);
}