//===-- GCStrategy.cpp - Garbage collection infrastructure ---------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements target- and collector-independent garbage collection
// infrastructure.
//
// GCMachineCodeAnalysis identifies the GC safe points in the machine code.
// Roots are identified in SelectionDAGISel.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetInstrInfo.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetRegisterInfo.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
  static bool NeedsDefaultLoweringPass(const GCStrategy &C);
  static bool NeedsCustomLoweringPass(const GCStrategy &C);
  static bool CouldBecomeSafePoint(Instruction *I);
  bool PerformDefaultLowering(Function &F, GCStrategy &Coll);
  static bool InsertRootInitializers(Function &F, AllocaInst **Roots,
                                     unsigned Count);

public:
  static char ID;

  LowerIntrinsics();
  const char *getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
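///
/// Roughly, for each call instruction a GC_LABEL pseudo-instruction is
/// emitted before and/or after the call, as requested by the strategy's
/// PreCall/PostCall safe-point flags, and each label is recorded in the
/// function's GCFunctionInfo; the stack offsets of the GC roots are filled
/// in afterwards. An illustrative sketch of the result around a call (the
/// call opcode is a placeholder, not taken from any particular target):
///
///   GC_LABEL <pre-call symbol>    ; only if PreCall safe points are needed
///   CALL ...
///   GC_LABEL <post-call symbol>   ; only if PostCall safe points are needed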
class GCMachineCodeAnalysis : public MachineFunctionPass {
  const TargetMachine *TM;
  GCFunctionInfo *FI;
  MachineModuleInfo *MMI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator MI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        DebugLoc DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};

}

// -----------------------------------------------------------------------------

GCStrategy::GCStrategy()
    : UseStatepoints(false), NeededSafePoints(0), CustomReadBarriers(false),
      CustomWriteBarriers(false), CustomRoots(false), CustomSafePoints(false),
      InitRoots(true), UsesMetadata(false) {}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering",
                      false, false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() {
  return new LowerIntrinsics();
}

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

const char *LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  // FIXME: This is rather antisocial in the context of a JIT since it performs
  //        work against the entire module. But this cannot be done at
  //        runFunction time (initializeCustomLowering likely needs to change
  //        the module).
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  bool MadeChange = false;
  for (GCModuleInfo::iterator I = MI->begin(), E = MI->end(); I != E; ++I)
    if (NeedsCustomLoweringPass(**I))
      if ((*I)->initializeCustomLowering(M))
        MadeChange = true;

  return MadeChange;
}

bool LowerIntrinsics::InsertRootInitializers(Function &F, AllocaInst **Roots,
                                             unsigned Count) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
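  // For every root that is not already stored to before the first potential
  // safe point, store a null pointer into its alloca immediately after the
  // alloca itself, so the collector never scans a root slot that still holds
  // uninitialized stack garbage.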
  bool MadeChange = false;

  for (AllocaInst **I = Roots, **E = Roots + Count; I != E; ++I)
    if (!InitedRoots.count(*I)) {
      StoreInst *SI = new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(
              cast<PointerType>((*I)->getType())->getElementType())),
          *I);
      SI->insertAfter(*I);
      MadeChange = true;
    }

  return MadeChange;
}

bool LowerIntrinsics::NeedsDefaultLoweringPass(const GCStrategy &C) {
  // Default lowering is necessary only if read or write barriers have a
  // default action. The default for roots is no action.
  return !C.customWriteBarrier() || !C.customReadBarrier() ||
         C.initializeRoots();
}

bool LowerIntrinsics::NeedsCustomLoweringPass(const GCStrategy &C) {
  // Custom lowering is only necessary if enabled for some action.
  return C.customWriteBarrier() || C.customReadBarrier() || C.customRoots();
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
bool LowerIntrinsics::CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (unsigned IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  bool MadeChange = false;

  if (NeedsDefaultLoweringPass(S))
    MadeChange |= PerformDefaultLowering(F, S);

  bool UseCustomLoweringPass = NeedsCustomLoweringPass(S);
  if (UseCustomLoweringPass)
    MadeChange |= S.performCustomLowering(F);

  // Custom lowering may modify the CFG, so dominators must be recomputed.
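  // DominatorTree is only preserved, not required, by this pass (see
  // getAnalysisUsage above), so recalculate it only if it happens to be
  // available.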
  if (UseCustomLoweringPass) {
    if (DominatorTreeWrapperPass *DTWP =
            getAnalysisIfAvailable<DominatorTreeWrapperPass>())
      DTWP->getDomTree().recalculate(F);
  }

  return MadeChange;
}

bool LowerIntrinsics::PerformDefaultLowering(Function &F, GCStrategy &S) {
  bool LowerWr = !S.customWriteBarrier();
  bool LowerRd = !S.customReadBarrier();
  bool InitRoots = S.initializeRoots();

  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (Function::iterator BB = F.begin(), E = F.end(); BB != E; ++BB) {
    for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) {
      if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) {
        Function *F = CI->getCalledFunction();
        switch (F->getIntrinsicID()) {
        case Intrinsic::gcwrite:
          if (LowerWr) {
            // Replace a write barrier with a simple store.
            Value *St = new StoreInst(CI->getArgOperand(0),
                                      CI->getArgOperand(2), CI);
            CI->replaceAllUsesWith(St);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcread:
          if (LowerRd) {
            // Replace a read barrier with a simple load.
            Value *Ld = new LoadInst(CI->getArgOperand(1), "", CI);
            Ld->takeName(CI);
            CI->replaceAllUsesWith(Ld);
            CI->eraseFromParent();
          }
          break;
        case Intrinsic::gcroot:
          if (InitRoots) {
            // Initialize the GC root, but do not delete the intrinsic. The
            // backend needs the intrinsic to flag the stack slot.
            Roots.push_back(
                cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
          }
          break;
        default:
          continue;
        }

        MadeChange = true;
      }
    }
  }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots.begin(), Roots.size());

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfo>();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             DebugLoc DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().CreateTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), too, so as to bracket the call
  // instruction.
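  // CI is the call itself; RAI (computed below) is the instruction following
  // it. Labels inserted before CI and before RAI therefore bracket the call.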
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  if (FI->getStrategy().needsSafePoint(GC::PreCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), CI, CI->getDebugLoc());
    FI->addSafePoint(GC::PreCall, Label, CI->getDebugLoc());
  }

  if (FI->getStrategy().needsSafePoint(GC::PostCall)) {
    MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
    FI->addSafePoint(GC::PostCall, Label, CI->getDebugLoc());
  }
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineFunction::iterator BBI = MF.begin(), BBE = MF.end(); BBI != BBE;
       ++BBI)
    for (MachineBasicBlock::iterator MI = BBI->begin(), ME = BBI->end();
         MI != ME; ++MI)
      if (MI->isCall())
        VisitCallPoint(MI);
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = TM->getSubtargetImpl()->getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo()->isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      RI->StackOffset = TFI->getFrameIndexOffset(MF, RI->Num);
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction()->hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(*MF.getFunction());
  if (!FI->getStrategy().needsSafePoints())
    return false;

  TM = &MF.getTarget();
  MMI = &getAnalysis<MachineModuleInfo>();
  TII = TM->getSubtargetImpl()->getInstrInfo();

  // Find the size of the stack frame.
  FI->setFrameSize(MF.getFrameInfo()->getStackSize());

  // Find all safe points.
  if (FI->getStrategy().customSafePoints()) {
    FI->getStrategy().findCustomSafePoints(*FI, MF);
  } else {
    FindSafePoints(MF);
  }

  // Find the stack offsets for all roots.
  FindStackOffsets(MF);

  return false;
}