//===- WholeProgramDevirt.cpp - Whole program virtual call optimization ---===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This pass implements whole program optimization of virtual calls in cases
// where we know (via bitset information) that the list of callees is fixed.
// This includes the following:
// - Single implementation devirtualization: if a virtual call has a single
//   possible callee, replace all calls with a direct call to that callee.
// - Virtual constant propagation: if the virtual function's return type is an
//   integer <=64 bits and all possible callees are readnone, for each class
//   and each list of constant arguments: evaluate the function, store the
//   return value alongside the virtual table, and rewrite each virtual call
//   as a load from the virtual table.
// - Uniform return value optimization: if the conditions for virtual constant
//   propagation hold and each function returns the same constant value,
//   replace each virtual call with that constant.
// - Unique return value optimization for i1 return values: if the conditions
//   for virtual constant propagation hold and a single vtable's function
//   returns 0, or a single vtable's function returns 1, replace each virtual
//   call with a comparison of the vptr against that vtable's address.
//
//===----------------------------------------------------------------------===//
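
// As a rough, hypothetical illustration of these optimizations (the class and
// function names below are made up and do not appear in this pass):
//
//   struct Base     { virtual unsigned kind() = 0; };
//   struct DerivedA : Base { unsigned kind() override { return 1; } };
//   struct DerivedB : Base { unsigned kind() override { return 2; } };
//
//   unsigned f(Base *B) { return B->kind(); }
//
// If DerivedA and DerivedB are the only classes in Base's bitset, virtual
// constant propagation can evaluate kind() for each class, store 1 and 2
// alongside the vtables, and turn the call in f into a load through the vptr.
// If both overrides returned the same constant, the uniform return value
// optimization would fold the call to that constant; for an i1 return type
// where exactly one vtable's override returns true, the unique return value
// optimization would instead compare the vptr against that vtable's address.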

#include "llvm/Transforms/IPO/WholeProgramDevirt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/Analysis/BitSetUtils.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
#include "llvm/Pass.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/Evaluator.h"
#include "llvm/Transforms/Utils/Local.h"

#include <map>
#include <set>

using namespace llvm;
using namespace wholeprogramdevirt;

#define DEBUG_TYPE "wholeprogramdevirt"

// Find the minimum offset that we may store a value of size Size bits at. If
// IsAfter is set, look for an offset after the object, otherwise look for an
// offset before the object.
uint64_t
wholeprogramdevirt::findLowestOffset(ArrayRef<VirtualCallTarget> Targets,
                                     bool IsAfter, uint64_t Size) {
  // Find a minimum offset taking into account only vtable sizes.
  uint64_t MinByte = 0;
  for (const VirtualCallTarget &Target : Targets) {
    if (IsAfter)
      MinByte = std::max(MinByte, Target.minAfterBytes());
    else
      MinByte = std::max(MinByte, Target.minBeforeBytes());
  }

  // Build a vector of arrays of bytes covering, for each target, a slice of
  // the used region (see AccumBitVector::BytesUsed in
  // llvm/Transforms/IPO/WholeProgramDevirt.h) starting at MinByte.
  // Effectively, this aligns the used regions to start at MinByte.
  //
  // In this example, A, B and C are vtables, # is a byte already allocated for
  // a virtual function pointer, AAAA... (etc.) are the used regions for the
  // vtables and Offset(X) is the value computed for the Offset variable below
  // for X.
  //
  //                    Offset(A)
  //                    |       |
  //                            |MinByte
  // A: ################AAAAAAAA|AAAAAAAA
  // B: ########BBBBBBBBBBBBBBBB|BBBB
  // C: ########################|CCCCCCCCCCCCCCCC
  //            |   Offset(B)   |
  //
  // This code produces the slices of A, B and C that appear after the divider
  // at MinByte.
  std::vector<ArrayRef<uint8_t>> Used;
  for (const VirtualCallTarget &Target : Targets) {
    ArrayRef<uint8_t> VTUsed = IsAfter ? Target.BS->Bits->After.BytesUsed
                                       : Target.BS->Bits->Before.BytesUsed;
    uint64_t Offset = IsAfter ? MinByte - Target.minAfterBytes()
                              : MinByte - Target.minBeforeBytes();

    // Disregard used regions that are smaller than Offset. These are
    // effectively all-free regions that do not need to be checked.
    if (VTUsed.size() > Offset)
      Used.push_back(VTUsed.slice(Offset));
  }

  if (Size == 1) {
    // Find a free bit in each member of Used.
    for (unsigned I = 0;; ++I) {
      uint8_t BitsUsed = 0;
      for (auto &&B : Used)
        if (I < B.size())
          BitsUsed |= B[I];
      if (BitsUsed != 0xff)
        return (MinByte + I) * 8 +
               countTrailingZeros(uint8_t(~BitsUsed), ZB_Undefined);
    }
  } else {
    // Find a free (Size/8) byte region in each member of Used.
    // FIXME: see if alignment helps.
    for (unsigned I = 0;; ++I) {
      for (auto &&B : Used) {
        unsigned Byte = 0;
        while ((I + Byte) < B.size() && Byte < (Size / 8)) {
          if (B[I + Byte])
            goto NextI;
          ++Byte;
        }
      }
      return (MinByte + I) * 8;
    NextI:;
    }
  }
}
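
// As a quick illustration of the encoding below (the numbers are made up):
// with BitWidth == 1 and AllocBefore == 10 bits, setBeforeReturnValues
// computes OffsetByte = -(10 / 8 + 1) = -2 and OffsetBit = 10 % 8 = 2, so the
// flag is read from bit 2 of the byte two bytes before the vtable's address
// point. With BitWidth == 32 and AllocBefore == 16 bits it computes
// OffsetByte = -((16 + 7) / 8 + (32 + 7) / 8) = -6. The "after" variant uses
// positive offsets past the end of the vtable. tryVirtualConstProp applies
// these offsets to the loaded vtable pointer when rewriting calls.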

void wholeprogramdevirt::setBeforeReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocBefore,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = -(AllocBefore / 8 + 1);
  else
    OffsetByte = -((AllocBefore + 7) / 8 + (BitWidth + 7) / 8);
  OffsetBit = AllocBefore % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setBeforeBit(AllocBefore);
    else
      Target.setBeforeBytes(AllocBefore, (BitWidth + 7) / 8);
  }
}

void wholeprogramdevirt::setAfterReturnValues(
    MutableArrayRef<VirtualCallTarget> Targets, uint64_t AllocAfter,
    unsigned BitWidth, int64_t &OffsetByte, uint64_t &OffsetBit) {
  if (BitWidth == 1)
    OffsetByte = AllocAfter / 8;
  else
    OffsetByte = (AllocAfter + 7) / 8;
  OffsetBit = AllocAfter % 8;

  for (VirtualCallTarget &Target : Targets) {
    if (BitWidth == 1)
      Target.setAfterBit(AllocAfter);
    else
      Target.setAfterBytes(AllocAfter, (BitWidth + 7) / 8);
  }
}

VirtualCallTarget::VirtualCallTarget(Function *Fn, const BitSetInfo *BS)
    : Fn(Fn), BS(BS),
      IsBigEndian(Fn->getParent()->getDataLayout().isBigEndian()) {}

namespace {

// A slot in a set of virtual tables. The BitSetID identifies the set of
// virtual tables, and the ByteOffset is the offset in bytes from the address
// point to the virtual function pointer.
struct VTableSlot {
  Metadata *BitSetID;
  uint64_t ByteOffset;
};

}

namespace llvm {

template <> struct DenseMapInfo<VTableSlot> {
  static VTableSlot getEmptyKey() {
    return {DenseMapInfo<Metadata *>::getEmptyKey(),
            DenseMapInfo<uint64_t>::getEmptyKey()};
  }
  static VTableSlot getTombstoneKey() {
    return {DenseMapInfo<Metadata *>::getTombstoneKey(),
            DenseMapInfo<uint64_t>::getTombstoneKey()};
  }
  static unsigned getHashValue(const VTableSlot &I) {
    return DenseMapInfo<Metadata *>::getHashValue(I.BitSetID) ^
           DenseMapInfo<uint64_t>::getHashValue(I.ByteOffset);
  }
  static bool isEqual(const VTableSlot &LHS,
                      const VTableSlot &RHS) {
    return LHS.BitSetID == RHS.BitSetID && LHS.ByteOffset == RHS.ByteOffset;
  }
};

}

namespace {

// A virtual call site. VTable is the loaded virtual table pointer, and CS is
// the indirect virtual call.
struct VirtualCallSite {
  Value *VTable;
  CallSite CS;

  void replaceAndErase(Value *New) {
    CS->replaceAllUsesWith(New);
    if (auto II = dyn_cast<InvokeInst>(CS.getInstruction())) {
      BranchInst::Create(II->getNormalDest(), CS.getInstruction());
      II->getUnwindDest()->removePredecessor(II->getParent());
    }
    CS->eraseFromParent();
  }
};

struct DevirtModule {
  Module &M;
  IntegerType *Int8Ty;
  PointerType *Int8PtrTy;
  IntegerType *Int32Ty;

  MapVector<VTableSlot, std::vector<VirtualCallSite>> CallSlots;

  DevirtModule(Module &M)
      : M(M), Int8Ty(Type::getInt8Ty(M.getContext())),
        Int8PtrTy(Type::getInt8PtrTy(M.getContext())),
        Int32Ty(Type::getInt32Ty(M.getContext())) {}

  void buildBitSets(std::vector<VTableBits> &Bits,
                    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets);
  bool tryFindVirtualCallTargets(std::vector<VirtualCallTarget> &TargetsForSlot,
                                 const std::set<BitSetInfo> &BitSetInfos,
                                 uint64_t ByteOffset);
  bool trySingleImplDevirt(ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryEvaluateFunctionsWithArgs(
      MutableArrayRef<VirtualCallTarget> TargetsForSlot,
      ArrayRef<ConstantInt *> Args);
  bool tryUniformRetValOpt(IntegerType *RetType,
                           ArrayRef<VirtualCallTarget> TargetsForSlot,
                           MutableArrayRef<VirtualCallSite> CallSites);
  bool tryUniqueRetValOpt(unsigned BitWidth,
                          ArrayRef<VirtualCallTarget> TargetsForSlot,
                          MutableArrayRef<VirtualCallSite> CallSites);
  bool tryVirtualConstProp(MutableArrayRef<VirtualCallTarget> TargetsForSlot,
                           ArrayRef<VirtualCallSite> CallSites);

  void rebuildGlobal(VTableBits &B);

  bool run();
};

struct WholeProgramDevirt : public ModulePass {
  static char ID;
  WholeProgramDevirt() : ModulePass(ID) {
    initializeWholeProgramDevirtPass(*PassRegistry::getPassRegistry());
  }
  bool runOnModule(Module &M) {
    if (skipModule(M))
      return false;

    return DevirtModule(M).run();
  }
};

} // anonymous namespace

INITIALIZE_PASS(WholeProgramDevirt, "wholeprogramdevirt",
                "Whole program devirtualization", false, false)
char WholeProgramDevirt::ID = 0;

ModulePass *llvm::createWholeProgramDevirtPass() {
  return new WholeProgramDevirt;
}

PreservedAnalyses WholeProgramDevirtPass::run(Module &M) {
  if (!DevirtModule(M).run())
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}
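
// buildBitSets (below) walks the module-level "llvm.bitsets" named metadata.
// Each operand is a triple: the bitset identifier, the vtable global (possibly
// behind an alias), and the byte offset of the address point within that
// global. Schematically, in hypothetical IR shown only for illustration:
//
//   @_ZTV1A = constant [4 x i8*] [ ... ]
//   !0 = !{!"1A", [4 x i8*]* @_ZTV1A, i64 16}
//   !llvm.bitsets = !{!0}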

void DevirtModule::buildBitSets(
    std::vector<VTableBits> &Bits,
    DenseMap<Metadata *, std::set<BitSetInfo>> &BitSets) {
  NamedMDNode *BitSetNM = M.getNamedMetadata("llvm.bitsets");
  if (!BitSetNM)
    return;

  DenseMap<GlobalVariable *, VTableBits *> GVToBits;
  Bits.reserve(BitSetNM->getNumOperands());
  for (auto Op : BitSetNM->operands()) {
    auto OpConstMD = dyn_cast_or_null<ConstantAsMetadata>(Op->getOperand(1));
    if (!OpConstMD)
      continue;
    auto BitSetID = Op->getOperand(0).get();

    Constant *OpConst = OpConstMD->getValue();
    if (auto GA = dyn_cast<GlobalAlias>(OpConst))
      OpConst = GA->getAliasee();
    auto OpGlobal = dyn_cast<GlobalVariable>(OpConst);
    if (!OpGlobal)
      continue;

    uint64_t Offset =
        cast<ConstantInt>(
            cast<ConstantAsMetadata>(Op->getOperand(2))->getValue())
            ->getZExtValue();

    VTableBits *&BitsPtr = GVToBits[OpGlobal];
    if (!BitsPtr) {
      Bits.emplace_back();
      Bits.back().GV = OpGlobal;
      Bits.back().ObjectSize = M.getDataLayout().getTypeAllocSize(
          OpGlobal->getInitializer()->getType());
      BitsPtr = &Bits.back();
    }
    BitSets[BitSetID].insert({BitsPtr, Offset});
  }
}

bool DevirtModule::tryFindVirtualCallTargets(
    std::vector<VirtualCallTarget> &TargetsForSlot,
    const std::set<BitSetInfo> &BitSetInfos, uint64_t ByteOffset) {
  for (const BitSetInfo &BS : BitSetInfos) {
    if (!BS.Bits->GV->isConstant())
      return false;

    auto Init = dyn_cast<ConstantArray>(BS.Bits->GV->getInitializer());
    if (!Init)
      return false;
    ArrayType *VTableTy = Init->getType();

    uint64_t ElemSize =
        M.getDataLayout().getTypeAllocSize(VTableTy->getElementType());
    uint64_t GlobalSlotOffset = BS.Offset + ByteOffset;
    if (GlobalSlotOffset % ElemSize != 0)
      return false;

    unsigned Op = GlobalSlotOffset / ElemSize;
    if (Op >= Init->getNumOperands())
      return false;

    auto Fn = dyn_cast<Function>(Init->getOperand(Op)->stripPointerCasts());
    if (!Fn)
      return false;

    // We can disregard __cxa_pure_virtual as a possible call target, as
    // calls to pure virtuals are UB.
    if (Fn->getName() == "__cxa_pure_virtual")
      continue;

    TargetsForSlot.push_back({Fn, &BS});
  }

  // Give up if we couldn't find any targets.
  return !TargetsForSlot.empty();
}
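
// A rough worked example of the slot lookup above (hypothetical numbers): for
// a vtable whose initializer is a [4 x i8*] array with its address point at
// byte offset 16 (BS.Offset == 16), a call slot at ByteOffset 8 gives
// GlobalSlotOffset == 24. With 8-byte pointers ElemSize is 8, so Op == 3 and
// the target is the function stored in the vtable's fourth array element.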

bool DevirtModule::trySingleImplDevirt(
    ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // See if the program contains a single implementation of this virtual
  // function.
  Function *TheFn = TargetsForSlot[0].Fn;
  for (auto &&Target : TargetsForSlot)
    if (TheFn != Target.Fn)
      return false;

  // If so, update each call site to call that implementation directly.
  for (auto &&VCallSite : CallSites) {
    VCallSite.CS.setCalledFunction(ConstantExpr::getBitCast(
        TheFn, VCallSite.CS.getCalledValue()->getType()));
  }
  return true;
}

bool DevirtModule::tryEvaluateFunctionsWithArgs(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<ConstantInt *> Args) {
  // Evaluate each function and store the result in each target's RetVal
  // field.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (Target.Fn->arg_size() != Args.size() + 1)
      return false;
    for (unsigned I = 0; I != Args.size(); ++I)
      if (Target.Fn->getFunctionType()->getParamType(I + 1) !=
          Args[I]->getType())
        return false;

    Evaluator Eval(M.getDataLayout(), nullptr);
    SmallVector<Constant *, 2> EvalArgs;
    EvalArgs.push_back(
        Constant::getNullValue(Target.Fn->getFunctionType()->getParamType(0)));
    EvalArgs.insert(EvalArgs.end(), Args.begin(), Args.end());
    Constant *RetVal;
    if (!Eval.EvaluateFunction(Target.Fn, RetVal, EvalArgs) ||
        !isa<ConstantInt>(RetVal))
      return false;
    Target.RetVal = cast<ConstantInt>(RetVal)->getZExtValue();
  }
  return true;
}

bool DevirtModule::tryUniformRetValOpt(
    IntegerType *RetType, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // Uniform return value optimization. If all functions return the same
  // constant, replace all calls with that constant.
  uint64_t TheRetVal = TargetsForSlot[0].RetVal;
  for (const VirtualCallTarget &Target : TargetsForSlot)
    if (Target.RetVal != TheRetVal)
      return false;

  auto TheRetValConst = ConstantInt::get(RetType, TheRetVal);
  for (auto Call : CallSites)
    Call.replaceAndErase(TheRetValConst);
  return true;
}

bool DevirtModule::tryUniqueRetValOpt(
    unsigned BitWidth, ArrayRef<VirtualCallTarget> TargetsForSlot,
    MutableArrayRef<VirtualCallSite> CallSites) {
  // IsOne controls whether we look for a 0 or a 1.
  auto tryUniqueRetValOptFor = [&](bool IsOne) {
    const BitSetInfo *UniqueBitSet = nullptr;
    for (const VirtualCallTarget &Target : TargetsForSlot) {
      if (Target.RetVal == (IsOne ? 1 : 0)) {
        if (UniqueBitSet)
          return false;
        UniqueBitSet = Target.BS;
      }
    }

    // We should have found a unique bit set or bailed out by now. We already
    // checked for a uniform return value in tryUniformRetValOpt.
    assert(UniqueBitSet);

    // Replace each call with the comparison.
    for (auto &&Call : CallSites) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *OneAddr = B.CreateBitCast(UniqueBitSet->Bits->GV, Int8PtrTy);
      OneAddr = B.CreateConstGEP1_64(OneAddr, UniqueBitSet->Offset);
      Value *Cmp = B.CreateICmp(IsOne ? ICmpInst::ICMP_EQ : ICmpInst::ICMP_NE,
                                Call.VTable, OneAddr);
      Call.replaceAndErase(Cmp);
    }
    return true;
  };

  if (BitWidth == 1) {
    if (tryUniqueRetValOptFor(true))
      return true;
    if (tryUniqueRetValOptFor(false))
      return true;
  }
  return false;
}
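
// A sketch of what tryVirtualConstProp (below) does to a call site, in
// hypothetical IR shown only for illustration: given a slot whose callees are
// all readnone and return a 32-bit constant, the evaluated per-vtable results
// are stored in otherwise unused bytes before or after each vtable, and
//
//   %result = call i32 %fptr(%class.A* %obj, i32 1)
//
// becomes a load at a fixed offset from the vtable pointer:
//
//   %valptr = getelementptr i8, i8* %vtable, i64 <OffsetByte>  ; then bitcast
//   %result = load i32, i32* %valptr.cast
//
// For an i1 return type only a single bit is allocated, and the call is
// rewritten to a byte load, an 'and' with the bit mask, and a compare against
// zero.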

bool DevirtModule::tryVirtualConstProp(
    MutableArrayRef<VirtualCallTarget> TargetsForSlot,
    ArrayRef<VirtualCallSite> CallSites) {
  // This only works if the function returns an integer.
  auto RetType = dyn_cast<IntegerType>(TargetsForSlot[0].Fn->getReturnType());
  if (!RetType)
    return false;
  unsigned BitWidth = RetType->getBitWidth();
  if (BitWidth > 64)
    return false;

  // Make sure that each function does not access memory, takes at least one
  // argument, does not use its first argument (which we assume is 'this'),
  // and has the same return type.
  for (VirtualCallTarget &Target : TargetsForSlot) {
    if (!Target.Fn->doesNotAccessMemory() || Target.Fn->arg_empty() ||
        !Target.Fn->arg_begin()->use_empty() ||
        Target.Fn->getReturnType() != RetType)
      return false;
  }

  // Group call sites by the list of constant arguments they pass.
  // The comparator ensures deterministic ordering.
  struct ByAPIntValue {
    bool operator()(const std::vector<ConstantInt *> &A,
                    const std::vector<ConstantInt *> &B) const {
      return std::lexicographical_compare(
          A.begin(), A.end(), B.begin(), B.end(),
          [](ConstantInt *AI, ConstantInt *BI) {
            return AI->getValue().ult(BI->getValue());
          });
    }
  };
  std::map<std::vector<ConstantInt *>, std::vector<VirtualCallSite>,
           ByAPIntValue>
      VCallSitesByConstantArg;
  for (auto &&VCallSite : CallSites) {
    std::vector<ConstantInt *> Args;
    if (VCallSite.CS.getType() != RetType)
      continue;
    for (auto &&Arg :
         make_range(VCallSite.CS.arg_begin() + 1, VCallSite.CS.arg_end())) {
      if (!isa<ConstantInt>(Arg))
        break;
      Args.push_back(cast<ConstantInt>(&Arg));
    }
    if (Args.size() + 1 != VCallSite.CS.arg_size())
      continue;

    VCallSitesByConstantArg[Args].push_back(VCallSite);
  }

  for (auto &&CSByConstantArg : VCallSitesByConstantArg) {
    if (!tryEvaluateFunctionsWithArgs(TargetsForSlot, CSByConstantArg.first))
      continue;

    if (tryUniformRetValOpt(RetType, TargetsForSlot, CSByConstantArg.second))
      continue;

    if (tryUniqueRetValOpt(BitWidth, TargetsForSlot, CSByConstantArg.second))
      continue;

    // Find an allocation offset in bits in all vtables in the bitset.
    uint64_t AllocBefore =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/false, BitWidth);
    uint64_t AllocAfter =
        findLowestOffset(TargetsForSlot, /*IsAfter=*/true, BitWidth);

    // Calculate the total amount of padding needed to store a value at both
    // ends of the object.
    uint64_t TotalPaddingBefore = 0, TotalPaddingAfter = 0;
    for (auto &&Target : TargetsForSlot) {
      TotalPaddingBefore += std::max<int64_t>(
          (AllocBefore + 7) / 8 - Target.allocatedBeforeBytes() - 1, 0);
      TotalPaddingAfter += std::max<int64_t>(
          (AllocAfter + 7) / 8 - Target.allocatedAfterBytes() - 1, 0);
    }

    // If the amount of padding is too large, give up.
    // FIXME: do something smarter here.
    if (std::min(TotalPaddingBefore, TotalPaddingAfter) > 128)
      continue;

    // Calculate the offset to the value as a (possibly negative) byte offset
    // and (if applicable) a bit offset, and store the values in the targets.
    int64_t OffsetByte;
    uint64_t OffsetBit;
    if (TotalPaddingBefore <= TotalPaddingAfter)
      setBeforeReturnValues(TargetsForSlot, AllocBefore, BitWidth, OffsetByte,
                            OffsetBit);
    else
      setAfterReturnValues(TargetsForSlot, AllocAfter, BitWidth, OffsetByte,
                           OffsetBit);

    // Rewrite each call to a load from OffsetByte/OffsetBit.
    for (auto Call : CSByConstantArg.second) {
      IRBuilder<> B(Call.CS.getInstruction());
      Value *Addr = B.CreateConstGEP1_64(Call.VTable, OffsetByte);
      if (BitWidth == 1) {
        Value *Bits = B.CreateLoad(Addr);
        Value *Bit = ConstantInt::get(Int8Ty, 1ULL << OffsetBit);
        Value *BitsAndBit = B.CreateAnd(Bits, Bit);
        auto IsBitSet = B.CreateICmpNE(BitsAndBit, ConstantInt::get(Int8Ty, 0));
        Call.replaceAndErase(IsBitSet);
      } else {
        Value *ValAddr = B.CreateBitCast(Addr, RetType->getPointerTo());
        Value *Val = B.CreateLoad(RetType, ValAddr);
        Call.replaceAndErase(Val);
      }
    }
  }
  return true;
}
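
// rebuildGlobal (below) re-wraps a vtable that received extra bytes from
// virtual constant propagation. Roughly, in hypothetical IR shown only for
// illustration: a vtable @vt with initializer %orig becomes
//
//   @0 = private constant { [8 x i8], <orig ty>, [8 x i8] }
//            { [8 x i8] <before bytes>, <orig ty> %orig, [8 x i8] <after> }
//   @vt = alias <orig ty>, getelementptr({ ... }, @0, i32 0, i32 1)
//
// so existing references to @vt still see the original vtable body, while the
// evaluated return values live immediately before and after it.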

void DevirtModule::rebuildGlobal(VTableBits &B) {
  if (B.Before.Bytes.empty() && B.After.Bytes.empty())
    return;

  // Align each byte array to pointer width.
  unsigned PointerSize = M.getDataLayout().getPointerSize();
  B.Before.Bytes.resize(alignTo(B.Before.Bytes.size(), PointerSize));
  B.After.Bytes.resize(alignTo(B.After.Bytes.size(), PointerSize));

  // Before was stored in reverse order; flip it now.
  for (size_t I = 0, Size = B.Before.Bytes.size(); I != Size / 2; ++I)
    std::swap(B.Before.Bytes[I], B.Before.Bytes[Size - 1 - I]);

  // Build an anonymous global containing the before bytes, followed by the
  // original initializer, followed by the after bytes.
  auto NewInit = ConstantStruct::getAnon(
      {ConstantDataArray::get(M.getContext(), B.Before.Bytes),
       B.GV->getInitializer(),
       ConstantDataArray::get(M.getContext(), B.After.Bytes)});
  auto NewGV =
      new GlobalVariable(M, NewInit->getType(), B.GV->isConstant(),
                         GlobalVariable::PrivateLinkage, NewInit, "", B.GV);
  NewGV->setSection(B.GV->getSection());
  NewGV->setComdat(B.GV->getComdat());

  // Build an alias named after the original global, pointing at the second
  // element (the original initializer).
  auto Alias = GlobalAlias::create(
      B.GV->getInitializer()->getType(), 0, B.GV->getLinkage(), "",
      ConstantExpr::getGetElementPtr(
          NewInit->getType(), NewGV,
          ArrayRef<Constant *>{ConstantInt::get(Int32Ty, 0),
                               ConstantInt::get(Int32Ty, 1)}),
      &M);
  Alias->setVisibility(B.GV->getVisibility());
  Alias->takeName(B.GV);

  B.GV->replaceAllUsesWith(Alias);
  B.GV->eraseFromParent();
}

bool DevirtModule::run() {
  Function *BitSetTestFunc =
      M.getFunction(Intrinsic::getName(Intrinsic::bitset_test));
  if (!BitSetTestFunc || BitSetTestFunc->use_empty())
    return false;

  Function *AssumeFunc = M.getFunction(Intrinsic::getName(Intrinsic::assume));
  if (!AssumeFunc || AssumeFunc->use_empty())
    return false;

  // Find all virtual calls via a virtual table pointer %p under an assumption
  // of the form llvm.assume(llvm.bitset.test(%p, %md)). This indicates that %p
  // points to a vtable in the bitset %md. Group calls by (bitset, offset) pair
  // (effectively the identity of the virtual function) and store to CallSlots.
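  //
  // The pattern looked for is, schematically (hypothetical IR, types
  // abbreviated):
  //
  //   %vtable = load i8*, i8** %vtableptr
  //   %p = call i1 @llvm.bitset.test(i8* %vtable, metadata !"A")
  //   call void @llvm.assume(i1 %p)
  //   %fptrptr = getelementptr i8, i8* %vtable, i64 8
  //   %fptr = load i8*, i8** %fptrptr
  //   call void %fptr(i8* %obj)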
  DenseSet<Value *> SeenPtrs;
  for (auto I = BitSetTestFunc->use_begin(), E = BitSetTestFunc->use_end();
       I != E;) {
    auto CI = dyn_cast<CallInst>(I->getUser());
    ++I;
    if (!CI)
      continue;

    // Search for virtual calls based on %p and add them to DevirtCalls.
    SmallVector<DevirtCallSite, 1> DevirtCalls;
    SmallVector<CallInst *, 1> Assumes;
    findDevirtualizableCalls(DevirtCalls, Assumes, CI);

    // If we found any, add them to CallSlots. Only do this if we haven't seen
    // the vtable pointer before, as it may have been CSE'd with pointers from
    // other call sites, and we don't want to process call sites multiple
    // times.
    if (!Assumes.empty()) {
      Metadata *BitSet =
          cast<MetadataAsValue>(CI->getArgOperand(1))->getMetadata();
      Value *Ptr = CI->getArgOperand(0)->stripPointerCasts();
      if (SeenPtrs.insert(Ptr).second) {
        for (DevirtCallSite Call : DevirtCalls) {
          CallSlots[{BitSet, Call.Offset}].push_back(
              {CI->getArgOperand(0), Call.CS});
        }
      }
    }

    // We no longer need the assumes or the bitset test.
    for (auto Assume : Assumes)
      Assume->eraseFromParent();
    // We can't use RecursivelyDeleteTriviallyDeadInstructions here because we
    // may use the vtable argument later.
    if (CI->use_empty())
      CI->eraseFromParent();
  }

  // Rebuild llvm.bitsets metadata into a map for easy lookup.
  std::vector<VTableBits> Bits;
  DenseMap<Metadata *, std::set<BitSetInfo>> BitSets;
  buildBitSets(Bits, BitSets);
  if (BitSets.empty())
    return true;

  // For each (bitset, offset) pair:
  bool DidVirtualConstProp = false;
  for (auto &S : CallSlots) {
    // Search each of the vtables in the bitset for the virtual function
    // implementation at offset S.first.ByteOffset, and add to TargetsForSlot.
    std::vector<VirtualCallTarget> TargetsForSlot;
    if (!tryFindVirtualCallTargets(TargetsForSlot, BitSets[S.first.BitSetID],
                                   S.first.ByteOffset))
      continue;

    if (trySingleImplDevirt(TargetsForSlot, S.second))
      continue;

    DidVirtualConstProp |= tryVirtualConstProp(TargetsForSlot, S.second);
  }

  // Rebuild each global we touched as part of virtual constant propagation to
  // include the before and after bytes.
  if (DidVirtualConstProp)
    for (VTableBits &B : Bits)
      rebuildGlobal(B);

  return true;
}