//===- TypeMetadataUtils.cpp - Utilities related to type metadata --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains functions that make it easier to manipulate type metadata
// for devirtualization.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"

using namespace llvm;

// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
                          bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
                          const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : FPtr->uses()) {
    Instruction *User = cast<Instruction>(U.getUser());
    // Ignore this instruction if it is not dominated by the type intrinsic
    // being analyzed. Otherwise we may transform a call sharing the same
    // vtable pointer incorrectly. Specifically, this situation can arise
    // after indirect call promotion and inlining, where we may have uses
    // of the vtable pointer guarded by a function pointer check, and a
    // fallback indirect call.
    if (!DT.dominates(CI, User))
      continue;
    if (isa<BitCastInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
                                DT);
    } else if (auto *Call = dyn_cast<CallInst>(User)) {
      DevirtCalls.push_back({Offset, *Call});
    } else if (auto *II = dyn_cast<InvokeInst>(User)) {
      DevirtCalls.push_back({Offset, *II});
    } else if (HasNonCallUses) {
      *HasNonCallUses = true;
    }
  }
}

// Search for virtual calls that load from VPtr and add them to DevirtCalls.
static void findLoadCallsAtConstantOffset(
    const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
    int64_t Offset, const CallInst *CI, DominatorTree &DT) {
  for (const Use &U : VPtr->uses()) {
    Value *User = U.getUser();
    if (isa<BitCastInst>(User)) {
      findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
    } else if (isa<LoadInst>(User)) {
      findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
    } else if (auto *GEP = dyn_cast<GetElementPtrInst>(User)) {
      // Take into account the GEP offset.
      if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
        SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
        int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
            GEP->getSourceElementType(), Indices);
        findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
                                      CI, DT);
      }
    }
  }
}

void llvm::findDevirtualizableCallsForTypeTest(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
    DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);

  const Module *M = CI->getModule();

  // Find llvm.assume intrinsics for this llvm.type.test call.
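  // The pattern being matched looks like the following IR (a sketch; the
  // value names and the typed-pointer syntax are illustrative):
  //
  //   %vtable = load i8*, i8** %obj
  //   %p = call i1 @llvm.type.test(i8* %vtable, metadata !"typeid")
  //   call void @llvm.assume(i1 %p)
  //
  // Each such llvm.assume establishes that %vtable is a valid vtable pointer
  // for !"typeid", which is what makes the loads below devirtualizable.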
  for (const Use &CIU : CI->uses()) {
    if (auto *AssumeCI = dyn_cast<CallInst>(CIU.getUser())) {
      Function *F = AssumeCI->getCalledFunction();
      if (F && F->getIntrinsicID() == Intrinsic::assume)
        Assumes.push_back(AssumeCI);
    }
  }

  // If we found any, search for virtual calls based on %p and add them to
  // DevirtCalls.
  if (!Assumes.empty())
    findLoadCallsAtConstantOffset(
        M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}

void llvm::findDevirtualizableCallsForTypeCheckedLoad(
    SmallVectorImpl<DevirtCallSite> &DevirtCalls,
    SmallVectorImpl<Instruction *> &LoadedPtrs,
    SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
    const CallInst *CI, DominatorTree &DT) {
  assert(CI->getCalledFunction()->getIntrinsicID() ==
         Intrinsic::type_checked_load);

  auto *Offset = dyn_cast<ConstantInt>(CI->getArgOperand(1));
  if (!Offset) {
    HasNonCallUses = true;
    return;
  }

  for (const Use &U : CI->uses()) {
    auto *CIU = U.getUser();
    if (auto *EVI = dyn_cast<ExtractValueInst>(CIU)) {
      // llvm.type.checked.load returns a {ptr, i1} pair: element 0 is the
      // loaded function pointer, element 1 is the type check predicate.
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
        LoadedPtrs.push_back(EVI);
        continue;
      }
      if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
        Preds.push_back(EVI);
        continue;
      }
    }
    HasNonCallUses = true;
  }

  for (Value *LoadedPtr : LoadedPtrs)
    findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
                              Offset->getZExtValue(), CI, DT);
}

// Search recursively through structs and arrays (typically a vtable
// initializer) for the pointer element located at byte offset Offset within
// the constant I. Returns null if no such element exists.
Constant *llvm::getPointerAtOffset(Constant *I, uint64_t Offset, Module &M) {
  if (I->getType()->isPointerTy()) {
    if (Offset == 0)
      return I;
    return nullptr;
  }

  const DataLayout &DL = M.getDataLayout();

  if (auto *C = dyn_cast<ConstantStruct>(I)) {
    const StructLayout *SL = DL.getStructLayout(C->getType());
    if (Offset >= SL->getSizeInBytes())
      return nullptr;

    unsigned Op = SL->getElementContainingOffset(Offset);
    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset - SL->getElementOffset(Op), M);
  }
  if (auto *C = dyn_cast<ConstantArray>(I)) {
    ArrayType *VTableTy = C->getType();
    uint64_t ElemSize = DL.getTypeAllocSize(VTableTy->getElementType());

    unsigned Op = Offset / ElemSize;
    if (Op >= C->getNumOperands())
      return nullptr;

    return getPointerAtOffset(cast<Constant>(I->getOperand(Op)),
                              Offset % ElemSize, M);
  }
  return nullptr;
}
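// Example for getPointerAtOffset (a sketch, not from this file; @vt, @S_foo
// and the 64-bit pointer size are assumptions):
//
//   @vt = constant { [3 x i8*] } { [3 x i8*]
//       [i8* null, i8* null, i8* bitcast (void ()* @S_foo to i8*)] }
//
// Calling getPointerAtOffset on @vt's initializer with Offset == 16 first
// descends into the struct element containing byte 16, then indexes the
// array (16 / 8 == 2) and returns the bitcast pointer to @S_foo.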