//===- AssumptionCache.cpp - Cache finding @llvm.assume calls ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains a pass that keeps track of @llvm.assume intrinsics in
// the functions of a module.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/AssumeBundleQueries.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/PatternMatch.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <utility>

using namespace llvm;
using namespace llvm::PatternMatch;

static cl::opt<bool>
    VerifyAssumptionCache("verify-assumption-cache", cl::Hidden,
                          cl::desc("Enable verification of assumption cache"),
                          cl::init(false));

SmallVector<AssumptionCache::ResultElem, 1> &
AssumptionCache::getOrInsertAffectedValues(Value *V) {
  // Try using find_as first to avoid creating extra value handles just for the
  // purpose of doing the lookup.
  auto AVI = AffectedValues.find_as(V);
  if (AVI != AffectedValues.end())
    return AVI->second;

  auto AVIP = AffectedValues.insert(
      {AffectedValueCallbackVH(V, this), SmallVector<ResultElem, 1>()});
  return AVIP.first->second;
}

static void
findAffectedValues(CallInst *CI,
                   SmallVectorImpl<AssumptionCache::ResultElem> &Affected) {
  // Note: This code must be kept in-sync with the code in
  // computeKnownBitsFromAssume in ValueTracking.

  auto AddAffected = [&Affected](Value *V, unsigned Idx =
                                               AssumptionCache::ExprResultIdx) {
    if (isa<Argument>(V)) {
      Affected.push_back({V, Idx});
    } else if (auto *I = dyn_cast<Instruction>(V)) {
      Affected.push_back({I, Idx});

      // Peek through unary operators to find the source of the condition.
      Value *Op;
      if (match(I, m_BitCast(m_Value(Op))) ||
          match(I, m_PtrToInt(m_Value(Op))) || match(I, m_Not(m_Value(Op)))) {
        if (isa<Instruction>(Op) || isa<Argument>(Op))
          Affected.push_back({Op, Idx});
      }
    }
  };

  for (unsigned Idx = 0; Idx != CI->getNumOperandBundles(); Idx++) {
    if (CI->getOperandBundleAt(Idx).Inputs.size() > ABA_WasOn &&
        CI->getOperandBundleAt(Idx).getTagName() != IgnoreBundleTag)
      AddAffected(CI->getOperandBundleAt(Idx).Inputs[ABA_WasOn], Idx);
  }

  Value *Cond = CI->getArgOperand(0), *A, *B;
  AddAffected(Cond);

  CmpInst::Predicate Pred;
  if (match(Cond, m_ICmp(Pred, m_Value(A), m_Value(B)))) {
    AddAffected(A);
    AddAffected(B);

    if (Pred == ICmpInst::ICMP_EQ) {
      // For equality comparisons, we handle the case of bit inversion.
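      // For example, given IR along the lines of:
      //   %m   = and i8 %a, %b
      //   %cmp = icmp eq i8 %m, 0
      //   call void @llvm.assume(i1 %cmp)
      // the lambda below looks through the 'and' so that %a and %b are also
      // recorded as affected values, not just %m.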
      auto AddAffectedFromEq = [&AddAffected](Value *V) {
        Value *A;
        if (match(V, m_Not(m_Value(A)))) {
          AddAffected(A);
          V = A;
        }

        Value *B;
        // (A & B) or (A | B) or (A ^ B).
        if (match(V, m_BitwiseLogic(m_Value(A), m_Value(B)))) {
          AddAffected(A);
          AddAffected(B);
          // (A << C) or (A >>_s C) or (A >>_u C) where C is some constant.
        } else if (match(V, m_Shift(m_Value(A), m_ConstantInt()))) {
          AddAffected(A);
        }
      };

      AddAffectedFromEq(A);
      AddAffectedFromEq(B);
    }

    Value *X;
    // Handle (A + C1) u< C2, which is the canonical form of A > C3 && A < C4,
    // and recognized by LVI at least.
    if (Pred == ICmpInst::ICMP_ULT &&
        match(A, m_Add(m_Value(X), m_ConstantInt())) &&
        match(B, m_ConstantInt()))
      AddAffected(X);
  }
}

void AssumptionCache::updateAffectedValues(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto &AVV = getOrInsertAffectedValues(AV.Assume);
    if (std::find_if(AVV.begin(), AVV.end(), [&](ResultElem &Elem) {
          return Elem.Assume == CI && Elem.Index == AV.Index;
        }) == AVV.end())
      AVV.push_back({CI, AV.Index});
  }
}

void AssumptionCache::unregisterAssumption(CallInst *CI) {
  SmallVector<AssumptionCache::ResultElem, 16> Affected;
  findAffectedValues(CI, Affected);

  for (auto &AV : Affected) {
    auto AVI = AffectedValues.find_as(AV.Assume);
    if (AVI == AffectedValues.end())
      continue;
    bool Found = false;
    bool HasNonnull = false;
    for (ResultElem &Elem : AVI->second) {
      if (Elem.Assume == CI) {
        Found = true;
        Elem.Assume = nullptr;
      }
      HasNonnull |= !!Elem.Assume;
      if (HasNonnull && Found)
        break;
    }
    assert(Found && "already unregistered or incorrect cache state");
    if (!HasNonnull)
      AffectedValues.erase(AVI);
  }

  erase_value(AssumeHandles, CI);
}

void AssumptionCache::AffectedValueCallbackVH::deleted() {
  auto AVI = AC->AffectedValues.find(getValPtr());
  if (AVI != AC->AffectedValues.end())
    AC->AffectedValues.erase(AVI);
  // 'this' now dangles!
}

void AssumptionCache::transferAffectedValuesInCache(Value *OV, Value *NV) {
  auto &NAVV = getOrInsertAffectedValues(NV);
  auto AVI = AffectedValues.find(OV);
  if (AVI == AffectedValues.end())
    return;

  for (auto &A : AVI->second)
    if (!llvm::is_contained(NAVV, A))
      NAVV.push_back(A);
  AffectedValues.erase(OV);
}

void AssumptionCache::AffectedValueCallbackVH::allUsesReplacedWith(Value *NV) {
  if (!isa<Instruction>(NV) && !isa<Argument>(NV))
    return;

  // Any assumptions that affected this value now affect the new value.

  AC->transferAffectedValuesInCache(getValPtr(), NV);
  // 'this' now might dangle! If the AffectedValues map was resized to add an
  // entry for NV then this object might have been destroyed in favor of some
  // copy in the grown map.
}

void AssumptionCache::scanFunction() {
  assert(!Scanned && "Tried to scan the function twice!");
  assert(AssumeHandles.empty() && "Already have assumes when scanning!");

  // Go through all instructions in all blocks, add all calls to @llvm.assume
  // to this cache.
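  // The scan runs lazily, on the first query of the cache; @llvm.assume calls
  // created before that point are picked up here rather than through
  // registerAssumption(), which deliberately ignores them until the scan has
  // happened.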
  for (BasicBlock &B : F)
    for (Instruction &II : B)
      if (match(&II, m_Intrinsic<Intrinsic::assume>()))
        AssumeHandles.push_back({&II, ExprResultIdx});

  // Mark the scan as complete.
  Scanned = true;

  // Update affected values.
  for (auto &A : AssumeHandles)
    updateAffectedValues(cast<CallInst>(A));
}

void AssumptionCache::registerAssumption(CallInst *CI) {
  assert(match(CI, m_Intrinsic<Intrinsic::assume>()) &&
         "Registered call does not call @llvm.assume");

  // If we haven't scanned the function yet, just drop this assumption. It will
  // be found when we scan later.
  if (!Scanned)
    return;

  AssumeHandles.push_back({CI, ExprResultIdx});

#ifndef NDEBUG
  assert(CI->getParent() &&
         "Cannot register @llvm.assume call not in a basic block");
  assert(&F == CI->getParent()->getParent() &&
         "Cannot register @llvm.assume call not in this function");

  // We expect the number of assumptions to be small, so in an asserts build
  // check that we don't accumulate duplicates and that all assumptions point
  // to the same function.
  SmallPtrSet<Value *, 16> AssumptionSet;
  for (auto &VH : AssumeHandles) {
    if (!VH)
      continue;

    assert(&F == cast<Instruction>(VH)->getParent()->getParent() &&
           "Cached assumption not inside this function!");
    assert(match(cast<CallInst>(VH), m_Intrinsic<Intrinsic::assume>()) &&
           "Cached something other than a call to @llvm.assume!");
    assert(AssumptionSet.insert(VH).second &&
           "Cache contains multiple copies of a call!");
  }
#endif

  updateAffectedValues(CI);
}

AnalysisKey AssumptionAnalysis::Key;

PreservedAnalyses AssumptionPrinterPass::run(Function &F,
                                             FunctionAnalysisManager &AM) {
  AssumptionCache &AC = AM.getResult<AssumptionAnalysis>(F);

  OS << "Cached assumptions for function: " << F.getName() << "\n";
  for (auto &VH : AC.assumptions())
    if (VH)
      OS << "  " << *cast<CallInst>(VH)->getArgOperand(0) << "\n";

  return PreservedAnalyses::all();
}

void AssumptionCacheTracker::FunctionCallbackVH::deleted() {
  auto I = ACT->AssumptionCaches.find_as(cast<Function>(getValPtr()));
  if (I != ACT->AssumptionCaches.end())
    ACT->AssumptionCaches.erase(I);
  // 'this' now dangles!
}

AssumptionCache &AssumptionCacheTracker::getAssumptionCache(Function &F) {
  // We probe the function map twice to try and avoid creating a value handle
  // around the function in common cases. This makes insertion a bit slower,
  // but if we have to insert we're going to scan the whole function so that
  // shouldn't matter.
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return *I->second;

  // Ok, build a new cache by scanning the function, insert it and the value
  // handle into our map, and return the newly populated cache.
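  // Keying the map with a FunctionCallbackVH means the entry is dropped
  // automatically (via FunctionCallbackVH::deleted above) if the function is
  // ever deleted.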
  auto IP = AssumptionCaches.insert(std::make_pair(
      FunctionCallbackVH(&F, this), std::make_unique<AssumptionCache>(F)));
  assert(IP.second && "Scanning function already in the map?");
  return *IP.first->second;
}

AssumptionCache *AssumptionCacheTracker::lookupAssumptionCache(Function &F) {
  auto I = AssumptionCaches.find_as(&F);
  if (I != AssumptionCaches.end())
    return I->second.get();
  return nullptr;
}

void AssumptionCacheTracker::verifyAnalysis() const {
  // FIXME: In the long term the verifier should not be controllable with a
  // flag. We should either fix all passes to correctly update the assumption
  // cache and enable the verifier unconditionally or somehow arrange for the
  // assumption list to be updated automatically by passes.
  if (!VerifyAssumptionCache)
    return;

  SmallPtrSet<const CallInst *, 4> AssumptionSet;
  for (const auto &I : AssumptionCaches) {
    for (auto &VH : I.second->assumptions())
      if (VH)
        AssumptionSet.insert(cast<CallInst>(VH));

    for (const BasicBlock &B : cast<Function>(*I.first))
      for (const Instruction &II : B)
        if (match(&II, m_Intrinsic<Intrinsic::assume>()) &&
            !AssumptionSet.count(cast<CallInst>(&II)))
          report_fatal_error("Assumption in scanned function not in cache");
  }
}

AssumptionCacheTracker::AssumptionCacheTracker() : ImmutablePass(ID) {
  initializeAssumptionCacheTrackerPass(*PassRegistry::getPassRegistry());
}

AssumptionCacheTracker::~AssumptionCacheTracker() = default;

char AssumptionCacheTracker::ID = 0;

INITIALIZE_PASS(AssumptionCacheTracker, "assumption-cache-tracker",
                "Assumption Cache Tracker", false, true)