//===- PartialInlining.cpp - Inline parts of functions --------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass performs partial inlining, typically by inlining an if statement
// that surrounds the body of the function.
//
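// For example, given a function shaped like this (an illustrative sketch;
// names and types are hypothetical):
//
//   define i32 @f(i32 %x) {
//   entry:
//     %tobool = icmp eq i32 %x, 0
//     br i1 %tobool, label %if.end, label %if.then
//   if.then:                              ; large, rarely executed body
//     %call = call i32 @expensive(i32 %x)
//     br label %if.end
//   if.end:
//     %r = phi i32 [ 0, %entry ], [ %call, %if.then ]
//     ret i32 %r
//   }
//
// the blocks below the entry are outlined into a new function, and callers
// inline just the entry-block test plus a call to that outlined body.
//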
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/PartialInlining.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/Transforms/Utils/CodeExtractor.h"
using namespace llvm;

#define DEBUG_TYPE "partialinlining"

STATISTIC(NumPartialInlined, "Number of functions partially inlined");

namespace {

struct PartialInlinerImpl {
  PartialInlinerImpl(InlineFunctionInfo IFI) : IFI(std::move(IFI)) {}
  bool run(Module &M);
  Function *unswitchFunction(Function *F);

private:
  InlineFunctionInfo IFI;
};

struct PartialInlinerLegacyPass : public ModulePass {
  static char ID; // Pass identification, replacement for typeid
  PartialInlinerLegacyPass() : ModulePass(ID) {
    initializePartialInlinerLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AssumptionCacheTracker>();
  }

  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

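    // Hand the inliner a callback that fetches the per-function
    // AssumptionCache lazily, so caches are only built for functions we
    // actually touch.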
    AssumptionCacheTracker *ACT = &getAnalysis<AssumptionCacheTracker>();
    std::function<AssumptionCache &(Function &)> GetAssumptionCache =
        [&ACT](Function &F) -> AssumptionCache & {
      return ACT->getAssumptionCache(F);
    };
    InlineFunctionInfo IFI(nullptr, &GetAssumptionCache);
    return PartialInlinerImpl(IFI).run(M);
  }
};

} // end anonymous namespace

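// Try to partially inline F: outline everything except the entry block and
// the return block into a new function, then inline the remaining shell (the
// entry-block test plus a call to the outlined body) into all callers.
// Returns the outlined function, or nullptr if F is not a candidate.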
Function *PartialInlinerImpl::unswitchFunction(Function *F) {
  // First, verify that this function is an unswitching candidate: the entry
  // block must end in a conditional branch.
  BasicBlock *EntryBlock = &F->front();
  BranchInst *BR = dyn_cast<BranchInst>(EntryBlock->getTerminator());
  if (!BR || BR->isUnconditional())
    return nullptr;

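  // Of the entry block's successors, exactly one must return directly; the
  // other becomes the entry to the region we outline.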
  BasicBlock *ReturnBlock = nullptr;
  BasicBlock *NonReturnBlock = nullptr;
  unsigned ReturnCount = 0;
  for (BasicBlock *BB : successors(EntryBlock)) {
    if (isa<ReturnInst>(BB->getTerminator())) {
      ReturnBlock = BB;
      ReturnCount++;
    } else
      NonReturnBlock = BB;
  }

  if (ReturnCount != 1)
    return nullptr;

  // Clone the function, so that we can hack away on it.
  ValueToValueMapTy VMap;
  Function *DuplicateFunction = CloneFunction(F, VMap);
  DuplicateFunction->setLinkage(GlobalValue::InternalLinkage);
  BasicBlock *NewEntryBlock = cast<BasicBlock>(VMap[EntryBlock]);
  BasicBlock *NewReturnBlock = cast<BasicBlock>(VMap[ReturnBlock]);
  BasicBlock *NewNonReturnBlock = cast<BasicBlock>(VMap[NonReturnBlock]);

  // Go ahead and update all uses of the original function to the duplicate,
  // so that we can simply reuse the inliner machinery once we are done
  // hacking on the duplicate.
  F->replaceAllUsesWith(DuplicateFunction);

  // Special hackery is needed with PHI nodes that have inputs from more than
  // one extracted block.  For simplicity, just split the PHIs into a two-level
  // sequence of PHIs, some of which will go in the extracted region, and some
  // of which will go outside.
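  //
  // For one PHI in the return block the rewrite looks like this (value names
  // are hypothetical):
  //
  //   before:  %p  = phi [ %a, %entry ], [ %b, %body ]
  //   after:   %p  = phi [ %b, %body ]                       ; extracted
  //            %rp = phi [ %p, %prereturn ], [ %a, %entry ]  ; stays behind
  //
  // with every old use of %p rewritten to %rp.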
  BasicBlock *PreReturn = NewReturnBlock;
  NewReturnBlock = NewReturnBlock->splitBasicBlock(
      NewReturnBlock->getFirstNonPHI()->getIterator());
  BasicBlock::iterator I = PreReturn->begin();
  Instruction *Ins = &NewReturnBlock->front();
  while (I != PreReturn->end()) {
    PHINode *OldPhi = dyn_cast<PHINode>(I);
    if (!OldPhi)
      break;

    PHINode *RetPhi = PHINode::Create(OldPhi->getType(), 2, "", Ins);
    OldPhi->replaceAllUsesWith(RetPhi);
    // Keep inserting subsequent PHIs before the first non-PHI instruction of
    // the new return block.
    Ins = NewReturnBlock->getFirstNonPHI();

    // The old PHI keeps only its inputs from extracted blocks; the input from
    // the entry block moves to the new PHI, which lives outside the extracted
    // region.
    RetPhi->addIncoming(&*I, PreReturn);
    RetPhi->addIncoming(OldPhi->getIncomingValueForBlock(NewEntryBlock),
                        NewEntryBlock);
    OldPhi->removeIncomingValue(NewEntryBlock);

    ++I;
  }
  NewEntryBlock->getTerminator()->replaceUsesOfWith(PreReturn, NewReturnBlock);

  // Gather up the blocks that we're going to extract: everything except the
  // new entry block and the new return block.
  std::vector<BasicBlock *> ToExtract;
  ToExtract.push_back(NewNonReturnBlock);
  for (BasicBlock &BB : *DuplicateFunction)
    if (&BB != NewEntryBlock && &BB != NewReturnBlock &&
        &BB != NewNonReturnBlock)
      ToExtract.push_back(&BB);

  // The CodeExtractor needs a dominator tree.
  DominatorTree DT;
  DT.recalculate(*DuplicateFunction);

  // Extract the body of the if.
  Function *ExtractedFunction =
      CodeExtractor(ToExtract, &DT).extractCodeRegion();
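  // CodeExtractor replaces the region with a call to ExtractedFunction, so
  // DuplicateFunction is now just the entry-block test, that call, and the
  // return block.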

  // Inline the top-level if test into all callers. Take a snapshot of the
  // users first, since inlining destroys the call sites as it goes.
  std::vector<User *> Users(DuplicateFunction->user_begin(),
                            DuplicateFunction->user_end());
  for (User *U : Users)
    if (CallInst *CI = dyn_cast<CallInst>(U))
      InlineFunction(CI, IFI);
    else if (InvokeInst *II = dyn_cast<InvokeInst>(U))
      InlineFunction(II, IFI);

  // Ditch the duplicate, since we're done with it, and rewrite all remaining
  // users (function pointers, etc.) back to the original function.
  DuplicateFunction->replaceAllUsesWith(F);
  DuplicateFunction->eraseFromParent();

  ++NumPartialInlined;

  return ExtractedFunction;
}

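// Walk every defined, used function in the module and try to partially
// inline it. Outlined bodies produced along the way are pushed back onto the
// worklist so they get the same treatment.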
bool PartialInlinerImpl::run(Module &M) {
  std::vector<Function *> Worklist;
  Worklist.reserve(M.size());
  for (Function &F : M)
    if (!F.use_empty() && !F.isDeclaration())
      Worklist.push_back(&F);

  bool Changed = false;
  while (!Worklist.empty()) {
    Function *CurrFunc = Worklist.back();
    Worklist.pop_back();

    if (CurrFunc->use_empty())
      continue;

    // Leave self-recursive functions alone: unswitchFunction RAUWs the
    // original with its clone, which would redirect the recursive call sites
    // as well.
    bool Recursive = false;
    for (User *U : CurrFunc->users())
      if (Instruction *I = dyn_cast<Instruction>(U))
        if (I->getParent()->getParent() == CurrFunc) {
          Recursive = true;
          break;
        }
    if (Recursive)
      continue;

    if (Function *NewFunc = unswitchFunction(CurrFunc)) {
      // The outlined body may itself be a candidate, so revisit it.
      Worklist.push_back(NewFunc);
      Changed = true;
    }
  }

  return Changed;
}

char PartialInlinerLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(PartialInlinerLegacyPass, "partial-inliner",
                      "Partial Inliner", false, false)
INITIALIZE_PASS_DEPENDENCY(AssumptionCacheTracker)
INITIALIZE_PASS_END(PartialInlinerLegacyPass, "partial-inliner",
                    "Partial Inliner", false, false)

ModulePass *llvm::createPartialInliningPass() {
  return new PartialInlinerLegacyPass();
}

PreservedAnalyses PartialInlinerPass::run(Module &M,
                                          ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  std::function<AssumptionCache &(Function &)> GetAssumptionCache =
      [&FAM](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };
  InlineFunctionInfo IFI(nullptr, &GetAssumptionCache);
  if (PartialInlinerImpl(IFI).run(M))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}