//===- LazyCallGraph.cpp - Analysis of a Module's call graph --------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/ADT/SCCIterator.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

static void findCallees(
    SmallVectorImpl<Constant *> &Worklist, SmallPtrSetImpl<Constant *> &Visited,
    SmallVectorImpl<PointerUnion<Function *, LazyCallGraph::Node *>> &Callees,
    SmallPtrSetImpl<Function *> &CalleeSet) {
  while (!Worklist.empty()) {
    Constant *C = Worklist.pop_back_val();

    if (Function *F = dyn_cast<Function>(C)) {
      // Note that we consider *any* function with a definition to be a viable
      // edge. Even if the function's definition is subject to replacement by
      // some other module (say, a weak definition), there may still be
      // optimizations which essentially speculate based on the definition and
      // a way to check that the specific definition is in fact the one being
      // used. For example, this could be done by moving the weak definition to
      // a strong (internal) definition and making the weak definition be an
      // alias. Then a test of the address of the weak function against the new
      // strong definition's address would be an effective way to determine the
      // safety of optimizing a direct call edge.
      if (!F->isDeclaration() && CalleeSet.insert(F))
        Callees.push_back(F);
      continue;
    }

    for (Value *Op : C->operand_values())
      if (Visited.insert(cast<Constant>(Op)))
        Worklist.push_back(cast<Constant>(Op));
  }
}

LazyCallGraph::Node::Node(LazyCallGraph &G, Function &F) : G(G), F(F) {
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  // Find all the potential callees in this function. First walk the
  // instructions and add every operand which is a constant to the worklist.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      for (Value *Op : I.operand_values())
        if (Constant *C = dyn_cast<Constant>(Op))
          if (Visited.insert(C))
            Worklist.push_back(C);

  // We've collected all the constant (and thus potentially function or
  // function-containing) operands of all the instructions in the function.
  // Process them (recursively), collecting every function found.
  findCallees(Worklist, Visited, Callees, CalleeSet);
}

LazyCallGraph::Node::Node(LazyCallGraph &G, const Node &OtherN)
    : G(G), F(OtherN.F), CalleeSet(OtherN.CalleeSet) {
  // Loop over the other node's callees, adding the Function*s to our list
  // directly, and recursing to add the Node*s.
  Callees.reserve(OtherN.Callees.size());
  for (const auto &OtherCallee : OtherN.Callees)
    if (Function *Callee = OtherCallee.dyn_cast<Function *>())
      Callees.push_back(Callee);
    else
      Callees.push_back(G.copyInto(*OtherCallee.get<Node *>()));
}

LazyCallGraph::Node::Node(LazyCallGraph &G, Node &&OtherN)
    : G(G), F(OtherN.F), Callees(std::move(OtherN.Callees)),
      CalleeSet(std::move(OtherN.CalleeSet)) {
  // Loop over our Callees. They've been moved from another node, but we need
  // to move the Node*s to live under our bump ptr allocator.
  for (auto &Callee : Callees)
    if (Node *ChildN = Callee.dyn_cast<Node *>())
      Callee = G.moveInto(std::move(*ChildN));
}

LazyCallGraph::LazyCallGraph(Module &M) : M(M) {
  for (Function &F : M)
    if (!F.isDeclaration() && !F.hasLocalLinkage())
      if (EntryNodeSet.insert(&F))
        EntryNodes.push_back(&F);

  // Now add entry nodes for functions reachable via initializers to globals.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  for (GlobalVariable &GV : M.globals())
    if (GV.hasInitializer())
      if (Visited.insert(GV.getInitializer()))
        Worklist.push_back(GV.getInitializer());

  findCallees(Worklist, Visited, EntryNodes, EntryNodeSet);
}

LazyCallGraph::LazyCallGraph(const LazyCallGraph &G)
    : M(G.M), EntryNodeSet(G.EntryNodeSet) {
  EntryNodes.reserve(G.EntryNodes.size());
  for (const auto &EntryNode : G.EntryNodes)
    if (Function *Callee = EntryNode.dyn_cast<Function *>())
      EntryNodes.push_back(Callee);
    else
      EntryNodes.push_back(copyInto(*EntryNode.get<Node *>()));
}

// FIXME: This would be much simpler if BumpPtrAllocator were movable without
// invalidating any of the allocated memory. We should make that be the case at
// some point and delete this.
LazyCallGraph::LazyCallGraph(LazyCallGraph &&G)
    : M(G.M), EntryNodes(std::move(G.EntryNodes)),
      EntryNodeSet(std::move(G.EntryNodeSet)) {
  // Loop over our EntryNodes. They've been moved from another graph, so we
  // need to move the Node*s to live under our bump ptr allocator. We can just
  // do this in-place.
  for (auto &Entry : EntryNodes)
    if (Node *EntryN = Entry.dyn_cast<Node *>())
      Entry = moveInto(std::move(*EntryN));
}

LazyCallGraph::Node *LazyCallGraph::insertInto(Function &F, Node *&MappedN) {
  return new (MappedN = BPA.Allocate()) Node(*this, F);
}

LazyCallGraph::Node *LazyCallGraph::copyInto(const Node &OtherN) {
  Node *&N = NodeMap[&OtherN.F];
  if (N)
    return N;

  return new (N = BPA.Allocate()) Node(*this, OtherN);
}

LazyCallGraph::Node *LazyCallGraph::moveInto(Node &&OtherN) {
  Node *&N = NodeMap[&OtherN.F];
  if (N)
    return N;

  return new (N = BPA.Allocate()) Node(*this, std::move(OtherN));
}

char LazyCallGraphAnalysis::PassID;

LazyCallGraphPrinterPass::LazyCallGraphPrinterPass(raw_ostream &OS) : OS(OS) {}

static void printNodes(raw_ostream &OS, LazyCallGraph::Node &N,
                       SmallPtrSetImpl<LazyCallGraph::Node *> &Printed) {
  // Recurse depth first through the nodes.
  for (LazyCallGraph::Node *ChildN : N)
    if (Printed.insert(ChildN))
      printNodes(OS, *ChildN, Printed);

  OS << "  Call edges in function: " << N.getFunction().getName() << "\n";
  for (LazyCallGraph::iterator I = N.begin(), E = N.end(); I != E; ++I)
    OS << "    -> " << I->getFunction().getName() << "\n";

  OS << "\n";
}

PreservedAnalyses LazyCallGraphPrinterPass::run(Module *M,
                                                ModuleAnalysisManager *AM) {
  LazyCallGraph &G = AM->getResult<LazyCallGraphAnalysis>(M);

  OS << "Printing the call graph for module: " << M->getModuleIdentifier()
     << "\n\n";

  SmallPtrSet<LazyCallGraph::Node *, 16> Printed;
  for (LazyCallGraph::Node *N : G)
    if (Printed.insert(N))
      printNodes(OS, *N, Printed);

  return PreservedAnalyses::all();
}