//===- PartialInlining.cpp - Inline parts of functions --------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs partial inlining, typically by inlining an if statement
// that surrounds the body of the function.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/PartialInlining.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/CodeExtractor.h"
using namespace llvm;

#define DEBUG_TYPE "partialinlining"

STATISTIC(NumPartialInlined, "Number of functions partially inlined");

namespace {
// Shared implementation driven by both the legacy-PM and new-PM entry points
// below.
struct PartialInlinerImpl {
  PartialInlinerImpl(InlineFunctionInfo IFI) : IFI(IFI) {}
  bool run(Module &M);
  // Attempts to partially inline F; returns the newly extracted outline
  // function on success, or nullptr if F is not a candidate.
  Function *unswitchFunction(Function *F);

private:
  InlineFunctionInfo IFI;
};
struct PartialInlinerLegacyPass : public ModulePass {
  static char ID; // Pass identification, replacement for typeid
  PartialInlinerLegacyPass() : ModulePass(ID) {
    initializePartialInlinerLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
  }
  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    AssumptionCacheTracker *ACT = &getAnalysis<AssumptionCacheTracker>();
    // InlineFunctionInfo holds a pointer to this std::function; it stays
    // alive for the duration of run(M) because run() is invoked before we
    // leave this scope.
    std::function<AssumptionCache &(Function &)> GetAssumptionCache =
        [&ACT](Function &F) -> AssumptionCache & {
      return ACT->getAssumptionCache(F);
    };
    InlineFunctionInfo IFI(nullptr, &GetAssumptionCache);
    return PartialInlinerImpl(IFI).run(M);
  }
};
}

Function *PartialInlinerImpl::unswitchFunction(Function *F) {
  // First, verify that this function is an unswitching candidate...
  // The entry block must end in a conditional branch where exactly one of the
  // two successors returns immediately; the other successor leads into the
  // body we will outline.
  BasicBlock *EntryBlock = &F->front();
  BranchInst *BR = dyn_cast<BranchInst>(EntryBlock->getTerminator());
  if (!BR || BR->isUnconditional())
    return nullptr;

  BasicBlock *ReturnBlock = nullptr;
  BasicBlock *NonReturnBlock = nullptr;
  unsigned ReturnCount = 0;
  for (BasicBlock *BB : successors(EntryBlock)) {
    if (isa<ReturnInst>(BB->getTerminator())) {
      ReturnBlock = BB;
      ReturnCount++;
    } else
      NonReturnBlock = BB;
  }

  if (ReturnCount != 1)
    return nullptr;

  // Clone the function, so that we can hack away on it.
  ValueToValueMapTy VMap;
  Function *DuplicateFunction = CloneFunction(F, VMap);
  DuplicateFunction->setLinkage(GlobalValue::InternalLinkage);
  BasicBlock *NewEntryBlock = cast<BasicBlock>(VMap[EntryBlock]);
  BasicBlock *NewReturnBlock = cast<BasicBlock>(VMap[ReturnBlock]);
  BasicBlock *NewNonReturnBlock = cast<BasicBlock>(VMap[NonReturnBlock]);

  // Go ahead and update all uses to the duplicate, so that we can just
  // use the inliner functionality when we're done hacking.
  F->replaceAllUsesWith(DuplicateFunction);

  // Special hackery is needed with PHI nodes that have inputs from more than
  // one extracted block.  For simplicity, just split the PHIs into a two-level
  // sequence of PHIs, some of which will go in the extracted region, and some
  // of which will go outside.
  BasicBlock *PreReturn = NewReturnBlock;
  // Split after the PHIs: PreReturn keeps the old PHIs (and stays in the
  // extracted region); NewReturnBlock gets the non-PHI tail and stays behind.
  NewReturnBlock = NewReturnBlock->splitBasicBlock(
      NewReturnBlock->getFirstNonPHI()->getIterator());
  BasicBlock::iterator I = PreReturn->begin();
  Instruction *Ins = &NewReturnBlock->front();
  while (I != PreReturn->end()) {
    PHINode *OldPhi = dyn_cast<PHINode>(I);
    if (!OldPhi)
      break;

    // Outer PHI in NewReturnBlock merges the value coming through the
    // extracted region (via OldPhi) with the value arriving directly from
    // the entry block.
    PHINode *RetPhi = PHINode::Create(OldPhi->getType(), 2, "", Ins);
    OldPhi->replaceAllUsesWith(RetPhi);
    Ins = NewReturnBlock->getFirstNonPHI();

    RetPhi->addIncoming(&*I, PreReturn);
    RetPhi->addIncoming(OldPhi->getIncomingValueForBlock(NewEntryBlock),
                        NewEntryBlock);
    // The entry edge now targets NewReturnBlock, so drop it from the inner
    // PHI that remains in the extracted region.
    OldPhi->removeIncomingValue(NewEntryBlock);

    ++I;
  }
  // Retarget the entry block's edge from the old return block to the split-off
  // tail, which remains in the (non-extracted) residual function.
  NewEntryBlock->getTerminator()->replaceUsesOfWith(PreReturn, NewReturnBlock);

  // Gather up the blocks that we're going to extract.
  // Everything except the entry block and the residual return tail.
  std::vector<BasicBlock *> ToExtract;
  ToExtract.push_back(NewNonReturnBlock);
  for (BasicBlock &BB : *DuplicateFunction)
    if (&BB != NewEntryBlock && &BB != NewReturnBlock &&
        &BB != NewNonReturnBlock)
      ToExtract.push_back(&BB);

  // The CodeExtractor needs a dominator tree.
  DominatorTree DT;
  DT.recalculate(*DuplicateFunction);

  // Manually calculate a BlockFrequencyInfo and BranchProbabilityInfo.
  // These let the extractor propagate profile data into the outlined function.
  LoopInfo LI(DT);
  BranchProbabilityInfo BPI(*DuplicateFunction, LI);
  BlockFrequencyInfo BFI(*DuplicateFunction, BPI, LI);

  // Extract the body of the if.
  Function *ExtractedFunction =
      CodeExtractor(ToExtract, &DT, /*AggregateArgs*/ false, &BFI, &BPI)
          .extractCodeRegion();

  // Inline the top-level if test into all callers.
  // Snapshot the user list first: InlineFunction mutates the use list as it
  // replaces call sites.
  std::vector<User *> Users(DuplicateFunction->user_begin(),
                            DuplicateFunction->user_end());
  for (User *User : Users)
    if (CallInst *CI = dyn_cast<CallInst>(User))
      InlineFunction(CI, IFI);
    else if (InvokeInst *II = dyn_cast<InvokeInst>(User))
      InlineFunction(II, IFI);

  // Ditch the duplicate, since we're done with it, and rewrite all remaining
  // users (function pointers, etc.) back to the original function.
  DuplicateFunction->replaceAllUsesWith(F);
  DuplicateFunction->eraseFromParent();

  ++NumPartialInlined;

  return ExtractedFunction;
}

bool PartialInlinerImpl::run(Module &M) {
  // Seed the worklist with every defined function that has at least one use;
  // newly outlined functions get pushed as they are created so they are
  // considered for further partial inlining.
  std::vector<Function *> Worklist;
  Worklist.reserve(M.size());
  for (Function &F : M)
    if (!F.use_empty() && !F.isDeclaration())
      Worklist.push_back(&F);

  bool Changed = false;
  while (!Worklist.empty()) {
    Function *CurrFunc = Worklist.back();
    Worklist.pop_back();

    // Uses may have disappeared since the function was queued.
    if (CurrFunc->use_empty())
      continue;

    // Skip (directly) recursive functions: inlining the entry test into a
    // recursive call site inside the function itself is not handled.
    bool Recursive = false;
    for (User *U : CurrFunc->users())
      if (Instruction *I = dyn_cast<Instruction>(U))
        if (I->getParent()->getParent() == CurrFunc) {
          Recursive = true;
          break;
        }
    if (Recursive)
      continue;

    if (Function *NewFunc = unswitchFunction(CurrFunc)) {
      Worklist.push_back(NewFunc);
      Changed = true;
    }
  }

  return Changed;
}

char PartialInlinerLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(PartialInlinerLegacyPass, "partial-inliner",
                      "Partial Inliner", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_END(PartialInlinerLegacyPass, "partial-inliner",
                    "Partial Inliner", false, false)

ModulePass *llvm::createPartialInliningPass() {
  return new PartialInlinerLegacyPass();
}

// New-pass-manager entry point; mirrors runOnModule above but pulls
// AssumptionCache results from the function analysis manager.
PreservedAnalyses PartialInlinerPass::run(Module &M,
                                          ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  std::function<AssumptionCache &(Function &)> GetAssumptionCache =
      [&FAM](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };
  InlineFunctionInfo IFI(nullptr, &GetAssumptionCache);
  if (PartialInlinerImpl(IFI).run(M))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}