//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via bitset information) that the list of callees is fixed.
// This includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class
//   and each list of constant arguments: evaluate the function, store the
//   return value alongside the virtual table, and rewrite each virtual call
//   as a load from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value,
//   replace each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
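// As a rough illustration (the class, function and value names below are
// hypothetical and do not appear in this file): given
//
//   struct A { virtual int f(); };
//   struct B : A { int f() override { return 1; } };
//
// if B::f is the only implementation of A::f visible to the whole program,
// single implementation devirtualization rewrites an indirect call through
// the vtable, e.g.
//
//   %fp = load i32 (i8*)*, i32 (i8*)** %slot   ; virtual function pointer
//   %r  = call i32 %fp(i8* %obj)               ; indirect virtual call
//
// into a direct call to B::f. If B::f is additionally readnone and always
// returns 1, uniform return value optimization replaces the call with the
// constant 1 outright.
//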
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Analysis/BitSetUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include "llvm/Transforms/Utils/Local.h"

#include <set>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of
  // the used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte.
  // Effectively, this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
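  //
  // To make the diagram concrete (purely an illustration of the search below):
  // after the divider, A's used region extends 8 bytes, B's 4 bytes and C's 16
  // bytes, so for a one-byte value (Size == 8) the first byte that is free in
  // all three slices is 16 bytes past MinByte, and the function would return
  // (MinByte + 16) * 8.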
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.BS->Bits->After.BytesUsed
                                       : Target.BS->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}

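// The two helpers below record where the evaluated return value for a slot
// will be stored relative to each vtable's address point: a (possibly
// negative) byte offset and, for i1 values, a bit offset within that byte.
// As a small illustrative example (numbers chosen arbitrarily): with
// BitWidth == 1 and AllocBefore == 10, the value is placed in the second byte
// before the address point (OffsetByte == -2) at bit 2 (OffsetBit == 10 % 8).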
void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const BitSetInfo *BS)
    : Fn(Fn), BS(BS),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The BitSetID identifies the set of
// virtual tables, and the ByteOffset is the offset in bytes from the address
// point to the virtual function pointer.
struct VTableSlot {
  Metadata *BitSetID;
  uint64_t ByteOffset;
};

}

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.BitSetID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.BitSetID == RHS.BitSetID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

}

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  void replaceAndErase(Value *New) {
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())) {}

  void buildBitSets(std::vector<VTableBits> &Bits,
                    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets);
  bool tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                                 const std::set<BitSetInfo> &BitSetInfos,
                                 uint64_t ByteOffset);
  bool trySingleImplDevirt(ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          ArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;
  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }
  bool runOnModule(Module &M) override {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M,
                                              ModuleAnalysisManager &) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}

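// buildBitSets parses the module's !llvm.bitsets named metadata. Each operand
// is expected to be a triple of (bitset identifier, vtable global, byte offset
// of the address point within that global). As a sketch with hypothetical
// names, a vtable for a class A might appear as:
//
//   !llvm.bitsets = !{!0}
//   !0 = !{!"_ZTS1A", [3 x i8*]* @_ZTV1A, i64 16}
//
// where 16 is the offset in bytes from the start of @_ZTV1A to its address
// point.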
void DevirtModule::buildBitSets(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets) {
  NamedMDNode *BitSetNM = M.getNamedMetadata("llvm.bitsets");
  if (!BitSetNM)
    return;

  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(BitSetNM->getNumOperands());
  for (auto Op : BitSetNM->operands()) {
    auto OpConstMD = dyn_cast_or_null<ConstantAsMetadata>(Op->getOperand(1));
    if (!OpConstMD)
      continue;
    auto BitSetID = Op->getOperand(0).get();

    Constant *OpConst = OpConstMD->getValue();
    if (auto GA = dyn_cast<GlobalAlias>(OpConst))
      OpConst = GA->getAliasee();
    auto OpGlobal = dyn_cast<GlobalVariable>(OpConst);
    if (!OpGlobal)
      continue;

    uint64_t Offset =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(Op->getOperand(2))->getValue())
            ->getZExtValue();

    VTableBits *&BitsPtr = GVToBits[OpGlobal];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = OpGlobal;
      Bits.back().ObjectSize = M.getDataLayout().getTypeAllocSize(
          OpGlobal->getInitializer()->getType());
      BitsPtr = &Bits.back();
    }
    BitSets[BitSetID].insert({BitsPtr, Offset});
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<BitSetInfo> &BitSetInfos, uint64_t ByteOffset) {
  for (const BitSetInfo &BS : BitSetInfos) {
    if (!BS.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(BS.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = BS.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &BS});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}

bool DevirtModule::trySingleImplDevirt(
    ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase(TheRetValConst);
  return true;
}

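// Unique return value optimization for i1: if exactly one vtable's function
// returns 1 (or exactly one returns 0), the call's result can be computed by
// comparing the vtable pointer against that vtable's address point. Roughly
// (value names here are illustrative only), a call site
//
//   %r = call i1 %fp(i8* %obj)
//
// becomes
//
//   %r = icmp eq i8* %vtable, <address point of the unique vtable>
//
// or icmp ne when the unique return value is 0.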
bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const BitSetInfo *UniqueBitSet = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueBitSet)
          return false;
        UniqueBitSet = Target.BS;
      }
    }

    // We should have found a unique bit set or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueBitSet);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueBitSet->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueBitSet->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase(Cmp);
    }
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables in the bitset.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // Rewrite each call to a load from OffsetByte/OffsetBit.
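    // As an illustrative sketch (value names are made up), a call returning a
    // 32-bit integer becomes roughly:
    //
    //   %valaddr = getelementptr i8, i8* %vtable, i64 OffsetByte
    //   %valptr  = bitcast i8* %valaddr to i32*
    //   %val     = load i32, i32* %valptr
    //
    // and an i1 call becomes a load of the byte at OffsetByte followed by a
    // test of the bit at OffsetBit.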
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase(IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase(Val);
      }
    }
  }
  return true;
}

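// rebuildGlobal replaces a vtable global whose before/after padding was used
// by virtual constant propagation. Schematically (a sketch, not literal IR),
// a global @vt with initializer INIT becomes an unnamed private global plus
// an alias:
//
//   @0  = private constant { <before bytes>, INIT, <after bytes> }
//   @vt = alias getelementptr(@0, 0, 1)
//
// so existing references to @vt still see the original vtable layout, while
// the evaluated return values live in the padding on either side of it.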
void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

bool DevirtModule::run() {
  Function *BitSetTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::bitset_test));
  if (!BitSetTestFunc || BitSetTestFunc->use_empty())
    return false;

  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
  if (!AssumeFunc || AssumeFunc->use_empty())
    return false;

  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.bitset.test(%p, %md)). This indicates that %p
  // points to a vtable in the bitset %md. Group calls by (bitset, offset) pair
  // (effectively the identity of the virtual function) and store to CallSlots.
  DenseSet<Value *> SeenPtrs;
  for (auto I = BitSetTestFunc->use_begin(), E = BitSetTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCalls(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple
    // times.
    if (!Assumes.empty()) {
      Metadata *BitSet =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{BitSet, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS});
        }
      }
    }

    // We no longer need the assumes or the bitset test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }

  // Rebuild llvm.bitsets metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<BitSetInfo>> BitSets;
  buildBitSets(Bits, BitSets);
  if (BitSets.empty())
    return true;

  // For each (bitset, offset) pair:
  bool DidVirtualConstProp = false;
  for (auto &S : CallSlots) {
    // Search each of the vtables in the bitset for the virtual function
    // implementation at offset S.first.ByteOffset, and add to TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, BitSets[S.first.BitSetID],
                                   S.first.ByteOffset))
      continue;

    if (trySingleImplDevirt(TargetsForSlot, S.second))
      continue;

    DidVirtualConstProp |= tryVirtualConstProp(TargetsForSlot, S.second);
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}