//===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/GCStrategy.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
  bool DoLowering(Function &F, GCStrategy &S);

public:
  static char ID;

  LowerIntrinsics();
  StringRef getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};
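
// As an illustrative sketch only (the value and field names below are made
// up), a frontend using the gc.root scheme might emit IR such as:
//
//   %v = call i8* @llvm.gcread(i8* %obj, i8** %field)
//   call void @llvm.gcwrite(i8* %v, i8* %obj, i8** %field)
//
// Any such calls still present after strategy-specific lowering are replaced
// with a plain "load i8*, i8** %field" and "store i8* %v, i8** %field", while
// llvm.gcroot calls are left in place for the code generator.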

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI;
  MachineModuleInfo *MMI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator CI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

StringRef LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Module::iterator I = M.begin(), E = M.end(); I != E; ++I)
    if (!I->isDeclaration() && I->hasGC())
      MI->getFunctionInfo(*I); // Instantiate the GC strategy.

  return false;
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (Intrinsic::ID IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
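  // Any store to a root's alloca seen before the first instruction that could
  // become a safe point counts as an initializer. For example (hypothetical
  // frontend output), a "store i8* null, i8** %root" emitted right after the
  // alloca is recorded by the loop below, and no redundant store is added for
  // that root later.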
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(&*IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst *Root : Roots)
    if (!InitedRoots.count(Root)) {
      new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
          Root, Root->getNextNode());
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  return DoLowering(F, S);
}

/// Lower barriers out of existence (if the associated GCStrategy hasn't
/// already done so...), and insert initializing stores to roots as a defensive
/// measure. Given we're going to report all roots live at all safepoints, we
/// need to be able to ensure each root has been initialized by the point the
/// first safepoint is reached. This really should have been done by the
/// frontend, but the old API made this non-obvious, so we do a potentially
/// redundant store just in case.
bool LowerIntrinsics::DoLowering(Function &F, GCStrategy &S) {
  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (BasicBlock &BB : F)
    for (BasicBlock::iterator II = BB.begin(), E = BB.end(); II != E;) {
      IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++);
      if (!CI)
        continue;

      Function *F = CI->getCalledFunction();
      switch (F->getIntrinsicID()) {
      default: break;
      case Intrinsic::gcwrite: {
        // Replace a write barrier with a simple store.
        Value *St = new StoreInst(CI->getArgOperand(0),
                                  CI->getArgOperand(2), CI);
        CI->replaceAllUsesWith(St);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcread: {
        // Replace a read barrier with a simple load.
        Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
        Ld->takeName(CI);
        CI->replaceAllUsesWith(Ld);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcroot: {
        // Initialize the GC root, but do not delete the intrinsic. The
        // backend needs the intrinsic to flag the stack slot.
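        // The first operand is (possibly a bitcast of) the alloca for the
        // root's stack slot; remember it so InsertRootInitializers can add a
        // defensive null store if the frontend did not initialize it.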
        Roots.push_back(
            cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
        break;
      }
      }
    }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots);

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<MachineModuleInfoWrapperPass>();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             const DebugLoc &DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), since that's what will be on
  // the stack when the call is suspended and we need to inspect the stack.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
  FI->addSafePoint(Label, CI->getDebugLoc());
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineBasicBlock &MBB : MF)
    for (MachineBasicBlock::iterator MI = MBB.begin(), ME = MBB.end();
         MI != ME; ++MI)
      if (MI->isCall()) {
        // Do not treat tail or sibling call sites as safe points. This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI->isTerminator())
          continue;
        VisitCallPoint(MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      Register FrameReg; // FIXME: surely GCRoot ought to store the
                         // register that the offset is from?
      RI->StackOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction().hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
  MMI = &getAnalysis<MachineModuleInfoWrapperPass>().getMMI();
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame. There may be no correct static frame
  // size; we use UINT64_MAX to represent this.
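  // (A frame with variable-sized objects or dynamic stack realignment, as
  // checked below, has no single compile-time size, so UINT64_MAX serves as
  // the "unknown" sentinel.)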
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->needsStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots).
  FindStackOffsets(MF);

  return false;
}